[ 567.208121] env[68144]: Modules with known eventlet monkey patching issues were imported prior to eventlet monkey patching: urllib3. This warning can usually be ignored if the caller is only importing and not executing nova code.
[ 567.835715] env[68194]: Modules with known eventlet monkey patching issues were imported prior to eventlet monkey patching: urllib3. This warning can usually be ignored if the caller is only importing and not executing nova code.
[ 569.197088] env[68194]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'linux_bridge' {{(pid=68194) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 569.197415] env[68194]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'noop' {{(pid=68194) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 569.197539] env[68194]: DEBUG os_vif [-] Loaded VIF plugin class '' with name 'ovs' {{(pid=68194) initialize /opt/stack/data/venv/lib/python3.10/site-packages/os_vif/__init__.py:44}}
[ 569.197844] env[68194]: INFO os_vif [-] Loaded VIF plugins: linux_bridge, noop, ovs
[ 569.395876] env[68194]: DEBUG oslo_concurrency.processutils [-] Running cmd (subprocess): grep -F node.session.scan /sbin/iscsiadm {{(pid=68194) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:384}}
[ 569.406420] env[68194]: DEBUG oslo_concurrency.processutils [-] CMD "grep -F node.session.scan /sbin/iscsiadm" returned: 0 in 0.011s {{(pid=68194) execute /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/processutils.py:422}}
[ 569.509721] env[68194]: INFO nova.virt.driver [None req-54793f71-e777-47e0-96c4-bd1d44ee6030 None None] Loading compute driver 'vmwareapi.VMwareVCDriver'
[ 569.584795] env[68194]: DEBUG oslo_concurrency.lockutils [-] Acquiring lock "oslo_vmware_api_lock" by "oslo_vmware.api.VMwareAPISession._create_session" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 569.584965] env[68194]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" acquired by "oslo_vmware.api.VMwareAPISession._create_session" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 569.585077] env[68194]: DEBUG oslo_vmware.service [-] Creating suds client with soap_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk' and wsdl_url='https://vc1.osci.c.eu-de-1.cloud.sap:443/sdk/vimService.wsdl' {{(pid=68194) __init__ /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:242}}
[ 572.484631] env[68194]: DEBUG oslo_vmware.service [-] Invoking ServiceInstance.RetrieveServiceContent with opID=oslo.vmware-cb56a05f-3850-4ae1-8eef-21b22c0df288 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.504961] env[68194]: DEBUG oslo_vmware.api [-] Logging into host: vc1.osci.c.eu-de-1.cloud.sap. {{(pid=68194) _create_session /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:242}}
[ 572.505170] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.Login with opID=oslo.vmware-182bff4d-971d-4b40-b3c9-bcc3137de955 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.532235] env[68194]: INFO oslo_vmware.api [-] Successfully established new session; session ID is 3e48a.
[ 572.532388] env[68194]: DEBUG oslo_concurrency.lockutils [-] Lock "oslo_vmware_api_lock" "released" by "oslo_vmware.api.VMwareAPISession._create_session" :: held 2.947s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 572.533105] env[68194]: INFO nova.virt.vmwareapi.driver [None req-54793f71-e777-47e0-96c4-bd1d44ee6030 None None] VMware vCenter version: 7.0.3
[ 572.538359] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f395cf1-2358-478c-9a7b-85bbdc055cb3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.560994] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ae92069-086a-4da6-aa3d-e67508f2e977 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.566784] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-df695560-1cae-4dbf-bcec-42bf9079313a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.573374] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-40aaf20b-95d8-4eb0-8da3-c0630c2f83ea {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.586363] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d92fc0b-b2be-4c07-94d1-43a6f06b210d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.592529] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58e97aef-0b84-4388-a089-7602bd2666fc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.622780] env[68194]: DEBUG oslo_vmware.service [-] Invoking ExtensionManager.FindExtension with opID=oslo.vmware-3daf0a7a-5316-4915-a209-0c80b096c561 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}}
[ 572.627946] env[68194]: DEBUG nova.virt.vmwareapi.driver [None req-54793f71-e777-47e0-96c4-bd1d44ee6030 None None] Extension org.openstack.compute already exists. {{(pid=68194) _register_openstack_extension /opt/stack/nova/nova/virt/vmwareapi/driver.py:224}}
[ 572.630680] env[68194]: INFO nova.compute.provider_config [None req-54793f71-e777-47e0-96c4-bd1d44ee6030 None None] No provider configs found in /etc/nova/provider_config/. If files are present, ensure the Nova process has access.
[ 572.649018] env[68194]: DEBUG nova.context [None req-54793f71-e777-47e0-96c4-bd1d44ee6030 None None] Found 2 cells: 00000000-0000-0000-0000-000000000000(cell0),4b825d40-9940-47af-8eac-301fde280bd0(cell1) {{(pid=68194) load_cells /opt/stack/nova/nova/context.py:464}}
[ 572.651086] env[68194]: DEBUG oslo_concurrency.lockutils [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Acquiring lock "00000000-0000-0000-0000-000000000000" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 572.651311] env[68194]: DEBUG oslo_concurrency.lockutils [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Lock "00000000-0000-0000-0000-000000000000" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 572.652027] env[68194]: DEBUG oslo_concurrency.lockutils [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Lock "00000000-0000-0000-0000-000000000000" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 572.652452] env[68194]: DEBUG oslo_concurrency.lockutils [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Acquiring lock "4b825d40-9940-47af-8eac-301fde280bd0" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}}
[ 572.652643] env[68194]: DEBUG oslo_concurrency.lockutils [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Lock "4b825d40-9940-47af-8eac-301fde280bd0" acquired by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}}
[ 572.654401] env[68194]: DEBUG oslo_concurrency.lockutils [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Lock "4b825d40-9940-47af-8eac-301fde280bd0" "released" by "nova.context.set_target_cell.<locals>.get_or_set_cached_cell_and_set_connections" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}}
[ 572.678345] env[68194]: INFO dbcounter [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Registered counter for database nova_cell0
[ 572.686871] env[68194]: INFO dbcounter [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Registered counter for database nova_cell1
[ 572.689791] env[68194]: DEBUG oslo_db.sqlalchemy.engines [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68194) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 572.690157] env[68194]: DEBUG oslo_db.sqlalchemy.engines [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] MySQL server mode set to STRICT_TRANS_TABLES,STRICT_ALL_TABLES,NO_ZERO_IN_DATE,NO_ZERO_DATE,ERROR_FOR_DIVISION_BY_ZERO,TRADITIONAL,NO_AUTO_CREATE_USER,NO_ENGINE_SUBSTITUTION {{(pid=68194) _check_effective_sql_mode /opt/stack/data/venv/lib/python3.10/site-packages/oslo_db/sqlalchemy/engines.py:342}}
[ 572.694661] env[68194]: DEBUG dbcounter [-] [68194] Writer thread running {{(pid=68194) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 572.695388] env[68194]: DEBUG dbcounter [-] [68194] Writer thread running {{(pid=68194) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:102}}
[ 572.697645] env[68194]: ERROR nova.db.main.api [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 221, in main
[ 572.697645] env[68194]: result = function(*args, **kwargs)
[ 572.697645] env[68194]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 572.697645] env[68194]: return func(*args, **kwargs)
[ 572.697645] env[68194]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 572.697645] env[68194]: result = fn(*args, **kwargs)
[ 572.697645] env[68194]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 572.697645] env[68194]: return f(*args, **kwargs)
[ 572.697645] env[68194]: File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 572.697645] env[68194]: return db.service_get_minimum_version(context, binaries)
[ 572.697645] env[68194]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 572.697645] env[68194]: _check_db_access()
[ 572.697645] env[68194]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 572.697645] env[68194]: stacktrace = ''.join(traceback.format_stack())
[ 572.697645] env[68194]:
[ 572.698439] env[68194]: ERROR nova.db.main.api [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] No DB access allowed in nova-compute: File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/greenthread.py", line 221, in main
[ 572.698439] env[68194]: result = function(*args, **kwargs)
[ 572.698439] env[68194]: File "/opt/stack/nova/nova/utils.py", line 664, in context_wrapper
[ 572.698439] env[68194]: return func(*args, **kwargs)
[ 572.698439] env[68194]: File "/opt/stack/nova/nova/context.py", line 422, in gather_result
[ 572.698439] env[68194]: result = fn(*args, **kwargs)
[ 572.698439] env[68194]: File "/opt/stack/nova/nova/db/main/api.py", line 179, in wrapper
[ 572.698439] env[68194]: return f(*args, **kwargs)
[ 572.698439] env[68194]: File "/opt/stack/nova/nova/objects/service.py", line 548, in _db_service_get_minimum_version
[ 572.698439] env[68194]: return db.service_get_minimum_version(context, binaries)
[ 572.698439] env[68194]: File "/opt/stack/nova/nova/db/main/api.py", line 238, in wrapper
[ 572.698439] env[68194]: _check_db_access()
[ 572.698439] env[68194]: File "/opt/stack/nova/nova/db/main/api.py", line 188, in _check_db_access
[ 572.698439] env[68194]: stacktrace = ''.join(traceback.format_stack())
[ 572.698439] env[68194]:
[ 572.698817] env[68194]: WARNING nova.objects.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Failed to get minimum service version for cell 4b825d40-9940-47af-8eac-301fde280bd0
[ 572.698933] env[68194]: WARNING nova.objects.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Failed to get minimum service version for cell 00000000-0000-0000-0000-000000000000
[ 572.699382] env[68194]: DEBUG oslo_concurrency.lockutils [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Acquiring lock "singleton_lock" {{(pid=68194) lock
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 572.699569] env[68194]: DEBUG oslo_concurrency.lockutils [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Acquired lock "singleton_lock" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 572.699824] env[68194]: DEBUG oslo_concurrency.lockutils [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Releasing lock "singleton_lock" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 572.700169] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Full set of CONF: {{(pid=68194) _wait_for_exit_or_signal /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/service.py:362}} [ 572.700315] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ******************************************************************************** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2589}} [ 572.700445] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] Configuration options gathered from: {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2590}} [ 572.700601] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] command line args: ['--config-file', '/etc/nova/nova.conf', '--config-file', '/etc/nova/nova-cpu-common.conf', '--config-file', '/etc/nova/nova-cpu-1.conf'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2591}} [ 572.700803] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] config files: ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2592}} [ 572.700936] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ================================================================================ {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2594}} [ 572.701166] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] allow_resize_to_same_host = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.701341] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] arq_binding_timeout = 300 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.701473] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] backdoor_port = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.701599] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] backdoor_socket = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.701761] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] block_device_allocate_retries = 60 {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.701929] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] block_device_allocate_retries_interval = 3 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.702120] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cert = self.pem {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.702294] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute_driver = vmwareapi.VMwareVCDriver {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.702466] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute_monitors = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.702635] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] config_dir = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.702811] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] config_drive_format = iso9660 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.702948] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] config_file = ['/etc/nova/nova.conf', '/etc/nova/nova-cpu-common.conf', '/etc/nova/nova-cpu-1.conf'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.703129] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] config_source = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.703301] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] console_host = devstack {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.703472] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] control_exchange = nova {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.703634] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cpu_allocation_ratio = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.703801] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] daemon = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.703969] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] debug = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.704147] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] default_access_ip_network_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.704317] 
env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] default_availability_zone = nova {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.704473] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] default_ephemeral_format = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.704634] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] default_green_pool_size = 1000 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.704882] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] default_log_levels = ['amqp=WARN', 'amqplib=WARN', 'boto=WARN', 'qpid=WARN', 'sqlalchemy=WARN', 'suds=INFO', 'oslo.messaging=INFO', 'oslo_messaging=INFO', 'iso8601=WARN', 'requests.packages.urllib3.connectionpool=WARN', 'urllib3.connectionpool=WARN', 'websocket=WARN', 'requests.packages.urllib3.util.retry=WARN', 'urllib3.util.retry=WARN', 'keystonemiddleware=WARN', 'routes.middleware=WARN', 'stevedore=WARN', 'taskflow=WARN', 'keystoneauth=WARN', 'oslo.cache=INFO', 'oslo_policy=INFO', 'dogpile.core.dogpile=INFO', 'glanceclient=WARN', 'oslo.privsep.daemon=INFO'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.705072] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] default_schedule_zone = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.705240] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] disk_allocation_ratio = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.705404] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] enable_new_services = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.705584] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] enabled_apis = ['osapi_compute'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.705752] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] enabled_ssl_apis = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.705912] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] flat_injected = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.706085] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] force_config_drive = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.706248] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] force_raw_images = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.706417] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce 
None None] graceful_shutdown_timeout = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.706577] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] heal_instance_info_cache_interval = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.706796] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] host = cpu-1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.706969] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] initial_cpu_allocation_ratio = 4.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.707148] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] initial_disk_allocation_ratio = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.707313] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] initial_ram_allocation_ratio = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.707527] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] injected_network_template = /opt/stack/nova/nova/virt/interfaces.template {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.707694] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] instance_build_timeout = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.707855] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] instance_delete_interval = 300 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.708035] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] instance_format = [instance: %(uuid)s] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.708210] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] instance_name_template = instance-%08x {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.708375] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] instance_usage_audit = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.708550] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] instance_usage_audit_period = month {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.708722] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] instance_uuid_format = [instance: %(uuid)s] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.708893] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] 
instances_path = /opt/stack/data/nova/instances {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.709074] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] internal_service_availability_zone = internal {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.709238] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] key = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.709401] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] live_migration_retry_count = 30 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.709602] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] log_config_append = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.709772] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] log_date_format = %Y-%m-%d %H:%M:%S {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.709939] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] log_dir = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.710114] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] log_file = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.710247] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] log_options = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.710413] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] log_rotate_interval = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.710606] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] log_rotate_interval_type = days {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.710791] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] log_rotation_type = none {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.710927] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] logging_context_format_string = %(color)s%(levelname)s %(name)s [%(global_request_id)s %(request_id)s %(project_name)s %(user_name)s%(color)s] %(instance)s%(color)s%(message)s {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.711067] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] logging_debug_format_suffix = {{(pid=%(process)d) %(funcName)s %(pathname)s:%(lineno)d}} {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.711246] env[68194]: DEBUG 
oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] logging_default_format_string = %(color)s%(levelname)s %(name)s [-%(color)s] %(instance)s%(color)s%(message)s {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.711418] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] logging_exception_prefix = ERROR %(name)s %(instance)s {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.711549] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] logging_user_identity_format = %(user)s %(project)s %(domain)s %(system_scope)s %(user_domain)s %(project_domain)s {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.711717] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] long_rpc_timeout = 1800 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.711882] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] max_concurrent_builds = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.712062] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] max_concurrent_live_migrations = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.712230] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] max_concurrent_snapshots = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.712393] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] max_local_block_devices = 3 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.712556] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] max_logfile_count = 30 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.712743] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] max_logfile_size_mb = 200 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.712919] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] maximum_instance_delete_attempts = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.713103] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] metadata_listen = 0.0.0.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.713278] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] metadata_listen_port = 8775 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.713448] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] metadata_workers = 2 {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.713609] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] migrate_max_retries = -1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.713779] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] mkisofs_cmd = genisoimage {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.713991] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] my_block_storage_ip = 10.180.1.21 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.714140] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] my_ip = 10.180.1.21 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.714307] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] network_allocate_retries = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.714487] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] non_inheritable_image_properties = ['cache_in_nova', 'bittorrent'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.714658] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] osapi_compute_listen = 0.0.0.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.714826] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] osapi_compute_listen_port = 8774 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.714995] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] osapi_compute_unique_server_name_scope = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.715177] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] osapi_compute_workers = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.715343] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] password_length = 12 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.715507] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] periodic_enable = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.715669] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] periodic_fuzzy_delay = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.715841] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] pointer_model = usbtablet {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.716013] env[68194]: 
DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] preallocate_images = none {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.716183] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] publish_errors = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.716315] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] pybasedir = /opt/stack/nova {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.716476] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ram_allocation_ratio = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.716638] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] rate_limit_burst = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.716809] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] rate_limit_except_level = CRITICAL {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.716970] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] rate_limit_interval = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.717144] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] reboot_timeout = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.717307] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] reclaim_instance_interval = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.717473] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] record = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.717638] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] reimage_timeout_per_gb = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.717806] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] report_interval = 120 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.717970] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] rescue_timeout = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.718142] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] reserved_host_cpus = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.718302] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] reserved_host_disk_mb = 0 {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.718461] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] reserved_host_memory_mb = 512 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.718622] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] reserved_huge_pages = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.718783] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] resize_confirm_window = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.718943] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] resize_fs_using_block_device = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.719118] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] resume_guests_state_on_host_boot = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.719289] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] rootwrap_config = /etc/nova/rootwrap.conf {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.719453] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] rpc_response_timeout = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.719676] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] run_external_periodic_tasks = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.719889] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] running_deleted_instance_action = reap {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.720090] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] running_deleted_instance_poll_interval = 1800 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.720280] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] running_deleted_instance_timeout = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.720467] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler_instance_sync_interval = 120 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.720686] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_down_time = 720 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.720877] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] servicegroup_driver = db {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.721055] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] shelved_offload_time = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.721246] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] shelved_poll_interval = 3600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.721424] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] shutdown_timeout = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.721591] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] source_is_ipv6 = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.721752] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ssl_only = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.722013] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] state_path = /opt/stack/data/n-cpu-1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.722194] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] sync_power_state_interval = 600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.722359] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] sync_power_state_pool_size = 1000 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.722530] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] syslog_log_facility = LOG_USER {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.722699] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] tempdir = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.722855] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] timeout_nbd = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.723034] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] transport_url = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.723203] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] update_resources_interval = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.723372] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] use_cow_images = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.723558] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce 
None None] use_eventlog = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.723725] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] use_journal = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.723886] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] use_json = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.724057] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] use_rootwrap_daemon = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.724223] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] use_stderr = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.724385] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] use_syslog = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.724539] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vcpu_pin_set = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.724709] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plugging_is_fatal = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.724874] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plugging_timeout = 300 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.725051] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] virt_mkfs = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.725224] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] volume_usage_poll_interval = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.725399] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] watch_log_file = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.725575] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] web = /usr/share/spice-html5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2602}} [ 572.725771] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_concurrency.disable_process_locking = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.726086] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_concurrency.lock_path = /opt/stack/data/n-cpu-1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.726276] 
env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_metrics.metrics_buffer_size = 1000 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.726448] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_metrics.metrics_enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.726624] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_metrics.metrics_process_name = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.726798] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_metrics.metrics_socket_file = /var/tmp/metrics_collector.sock {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.726965] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_metrics.metrics_thread_stop_timeout = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.727166] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.auth_strategy = keystone {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.727338] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.compute_link_prefix = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.727556] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.config_drive_skip_versions = 1.0 2007-01-19 2007-03-01 2007-08-29 2007-10-10 2007-12-15 2008-02-01 2008-09-01 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.727698] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.dhcp_domain = novalocal {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.727871] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.enable_instance_password = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.728047] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.glance_link_prefix = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.728222] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.instance_list_cells_batch_fixed_size = 100 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.728396] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.instance_list_cells_batch_strategy = distributed {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.728560] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] 
api.instance_list_per_project_cells = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.728729] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.list_records_by_skipping_down_cells = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.728891] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.local_metadata_per_cell = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.729073] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.max_limit = 1000 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.729249] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.metadata_cache_expiration = 15 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.729426] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.neutron_default_tenant_id = default {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.729617] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.use_forwarded_for = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.729796] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.use_neutron_default_nets = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.729976] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.vendordata_dynamic_connect_timeout = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.730147] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.vendordata_dynamic_failure_fatal = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.730318] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.vendordata_dynamic_read_timeout = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.730494] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.vendordata_dynamic_ssl_certfile = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.730711] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.vendordata_dynamic_targets = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.730919] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api.vendordata_jsonfile_path = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.731125] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] 
api.vendordata_providers = ['StaticJSON'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.731327] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.backend = dogpile.cache.memcached {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.731503] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.backend_argument = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.731680] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.config_prefix = cache.oslo {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.731856] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.dead_timeout = 60.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.732037] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.debug_cache_backend = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.732211] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.enable_retry_client = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.732380] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.enable_socket_keepalive = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.732553] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.enabled = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.732723] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.expiration_time = 600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.732890] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.hashclient_retry_attempts = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.733074] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.hashclient_retry_delay = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.733271] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_dead_retry = 300 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.733455] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_password = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.733624] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_pool_connection_get_timeout = 10 {{(pid=68194) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.733795] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_pool_flush_on_reconnect = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.733964] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_pool_maxsize = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.734144] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_pool_unused_timeout = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.734311] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_sasl_enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.734493] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_servers = ['localhost:11211'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.734694] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_socket_timeout = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.734841] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.memcache_username = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.735017] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.proxies = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.735191] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.retry_attempts = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.735363] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.retry_delay = 0.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.735531] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.socket_keepalive_count = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.735698] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.socket_keepalive_idle = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.735865] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.socket_keepalive_interval = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.736036] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.tls_allowed_ciphers = None {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.736200] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.tls_cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.736362] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.tls_certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.736528] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.tls_enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.736700] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cache.tls_keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.736882] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.auth_section = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.737075] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.auth_type = password {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.737249] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.737432] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.catalog_info = volumev3::publicURL {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.737595] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.737764] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.737931] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.cross_az_attach = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.738107] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.debug = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.738276] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.endpoint_template = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.738443] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.http_retries = 3 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.738611] env[68194]: DEBUG oslo_service.service [None 
req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.738775] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.738951] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.os_region_name = RegionOne {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.739130] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.739296] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cinder.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.739471] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.consecutive_build_service_disable_threshold = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.739656] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.cpu_dedicated_set = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.739824] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.cpu_shared_set = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.739992] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.image_type_exclude_list = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.740172] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.live_migration_wait_for_vif_plug = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.740339] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.max_concurrent_disk_ops = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.740503] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.max_disk_devices_to_attach = -1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.740669] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.packing_host_numa_cells_allocation_strategy = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.740848] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.provider_config_location = /etc/nova/provider_config/ {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.741019] env[68194]: DEBUG oslo_service.service 
[None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.resource_provider_association_refresh = 300 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.741189] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.shutdown_retry_interval = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.741372] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] compute.vmdk_allowed_types = ['streamOptimized', 'monolithicSparse'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.741556] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] conductor.workers = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.741737] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] console.allowed_origins = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.741904] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] console.ssl_ciphers = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.742090] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] console.ssl_minimum_version = default {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.742272] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] consoleauth.token_ttl = 600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.742445] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.742609] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.742777] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.742940] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.connect_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.743114] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.connect_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.743279] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.endpoint_override = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.743445] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] 
cyborg.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.743607] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.743768] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.max_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.743928] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.min_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.744099] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.region_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.744264] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.service_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.744434] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.service_type = accelerator {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.744597] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.744757] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.status_code_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.744918] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.status_code_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.745091] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.745277] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.valid_interfaces = ['internal', 'public'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.745443] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] cyborg.version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.745645] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.backend = sqlalchemy {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.746281] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.connection = **** {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.746281] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.connection_debug = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.746281] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.connection_parameters = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.746404] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.connection_recycle_time = 3600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.746468] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.connection_trace = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.747220] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.db_inc_retry_interval = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.747220] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.db_max_retries = 20 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.747220] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.db_max_retry_interval = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.747220] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.db_retry_interval = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.748916] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.max_overflow = 50 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.748916] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.max_pool_size = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.748916] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.max_retries = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.748916] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.mysql_sql_mode = TRADITIONAL {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.748916] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.mysql_wsrep_sync_wait = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.748916] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.pool_timeout = None {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749183] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.retry_interval = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749183] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.slave_connection = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749183] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.sqlite_synchronous = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749183] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] database.use_db_reconnect = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749183] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.backend = sqlalchemy {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749183] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.connection = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749371] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.connection_debug = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749475] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.connection_parameters = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749633] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.connection_recycle_time = 3600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749816] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.connection_trace = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.749981] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.db_inc_retry_interval = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.750232] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.db_max_retries = 20 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.750338] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.db_max_retry_interval = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.750500] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.db_retry_interval = 1 {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.750701] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.max_overflow = 50 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.750873] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.max_pool_size = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.751056] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.max_retries = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.751236] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.mysql_sql_mode = TRADITIONAL {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.751398] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.mysql_wsrep_sync_wait = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.751563] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.pool_timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.751733] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.retry_interval = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.751913] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.slave_connection = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.752109] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] api_database.sqlite_synchronous = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.752292] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] devices.enabled_mdev_types = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.752475] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ephemeral_storage_encryption.cipher = aes-xts-plain64 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.752645] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ephemeral_storage_encryption.enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.752820] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ephemeral_storage_encryption.key_size = 512 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.752994] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.api_servers = None {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.753177] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.753343] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.753509] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.753670] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.connect_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.753834] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.connect_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.753998] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.debug = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.754178] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.default_trusted_certificate_ids = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.754342] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.enable_certificate_validation = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.754507] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.enable_rbd_download = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.754668] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.endpoint_override = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.754836] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.754999] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.755175] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.max_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.755335] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.min_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.755502] env[68194]: DEBUG 
oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.num_retries = 3 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.755675] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.rbd_ceph_conf = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.755840] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.rbd_connect_timeout = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.756021] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.rbd_pool = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.756190] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.rbd_user = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.756355] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.region_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.756519] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.service_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.756690] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.service_type = image {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.756856] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.757026] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.status_code_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.757194] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.status_code_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.757355] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.757540] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.valid_interfaces = ['internal', 'public'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.757706] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.verify_glance_signatures = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.757868] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] glance.version = None {{(pid=68194) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.758054] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] guestfs.debug = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.758232] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.config_drive_cdrom = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.758396] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.config_drive_inject_password = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.758564] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.dynamic_memory_ratio = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.758732] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.enable_instance_metrics_collection = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.758896] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.enable_remotefx = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.759078] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.instances_path_share = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.759249] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.iscsi_initiator_list = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.759413] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.limit_cpu_features = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.759606] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.mounted_disk_query_retry_count = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.759792] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.mounted_disk_query_retry_interval = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.759959] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.power_state_check_timeframe = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.760147] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.power_state_event_polling_interval = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.760352] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.qemu_img_cmd = qemu-img.exe {{(pid=68194) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.760486] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.use_multipath_io = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.760689] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.volume_attach_retry_count = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.760869] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.volume_attach_retry_interval = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.761045] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.vswitch_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.761213] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] hyperv.wait_soft_reboot_seconds = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.761382] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] mks.enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.761752] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] mks.mksproxy_base_url = http://127.0.0.1:6090/ {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.761967] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] image_cache.manager_interval = 2400 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.762165] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] image_cache.precache_concurrency = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.762344] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] image_cache.remove_unused_base_images = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.762521] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] image_cache.remove_unused_original_minimum_age_seconds = 86400 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.762694] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] image_cache.remove_unused_resized_minimum_age_seconds = 3600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.762879] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] image_cache.subdirectory_name = _base {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.763068] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.api_max_retries 
= 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.763241] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.api_retry_interval = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.763405] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.auth_section = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.763571] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.auth_type = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.763736] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.763898] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.764076] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.764250] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.conductor_group = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.764414] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.connect_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.764576] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.connect_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.764744] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.endpoint_override = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.764907] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.765079] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.765245] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.max_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.765405] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.min_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.765570] env[68194]: DEBUG 
oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.peer_list = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.765731] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.region_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.765897] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.serial_console_state_timeout = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.766070] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.service_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.766246] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.service_type = baremetal {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.766413] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.766578] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.status_code_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.766742] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.status_code_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.766903] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.767097] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.valid_interfaces = ['internal', 'public'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.767263] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ironic.version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.767449] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] key_manager.backend = nova.keymgr.conf_key_mgr.ConfKeyManager {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.767628] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] key_manager.fixed_key = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.767815] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.auth_endpoint = http://localhost/identity/v3 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.767981] env[68194]: DEBUG oslo_service.service [None 
req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.barbican_api_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.768157] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.barbican_endpoint = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.768333] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.barbican_endpoint_type = public {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.768496] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.barbican_region_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.768657] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.768820] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.768982] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.769159] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.769322] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.769487] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.number_of_retries = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.769681] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.retry_delay = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.769850] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.send_service_user_token = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.770027] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.770193] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.770357] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.verify_ssl = True {{(pid=68194) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.770521] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican.verify_ssl_path = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.770710] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican_service_user.auth_section = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.770878] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican_service_user.auth_type = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.771050] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican_service_user.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.771216] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican_service_user.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.771382] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican_service_user.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.771547] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican_service_user.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.771709] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican_service_user.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.771889] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican_service_user.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.772087] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] barbican_service_user.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.772266] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.approle_role_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.772429] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.approle_secret_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.772590] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.772752] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.certfile = None {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.772917] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.773092] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.773259] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.773432] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.kv_mountpoint = secret {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.773597] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.kv_path = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.773764] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.kv_version = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.773927] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.namespace = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.774134] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.root_token_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.774307] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.774470] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.ssl_ca_crt_file = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.774632] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.774799] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.use_ssl = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.774974] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vault.vault_url = http://127.0.0.1:8200 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.775159] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.auth_section = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.775327] env[68194]: DEBUG oslo_service.service [None 
req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.auth_type = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.775490] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.775652] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.775820] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.775981] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.connect_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.776160] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.connect_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.776322] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.endpoint_override = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.776487] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.776649] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.776811] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.max_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.776970] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.min_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.777145] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.region_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.777306] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.service_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.777493] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.service_type = identity {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.777690] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.split_loggers = False {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.777860] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.status_code_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.778042] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.status_code_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.778211] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.778396] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.valid_interfaces = ['internal', 'public'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.778560] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] keystone.version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.778764] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.connection_uri = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.778927] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.cpu_mode = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.779108] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.cpu_model_extra_flags = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.779280] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.cpu_models = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.779453] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.cpu_power_governor_high = performance {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.779653] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.cpu_power_governor_low = powersave {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.779832] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.cpu_power_management = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.780013] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.cpu_power_management_strategy = cpu_state {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.780185] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.device_detach_attempts = 8 {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.780350] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.device_detach_timeout = 20 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.780528] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.disk_cachemodes = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.780723] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.disk_prefix = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.780900] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.enabled_perf_events = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.781080] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.file_backed_memory = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.781253] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.gid_maps = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.781415] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.hw_disk_discard = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.781576] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.hw_machine_type = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.781749] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.images_rbd_ceph_conf = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.781918] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.images_rbd_glance_copy_poll_interval = 15 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.782107] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.images_rbd_glance_copy_timeout = 600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.782281] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.images_rbd_glance_store_name = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.782452] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.images_rbd_pool = rbd {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.782651] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.images_type = default {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.782832] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.images_volume_group = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.782998] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.inject_key = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.783178] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.inject_partition = -2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.783344] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.inject_password = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.783508] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.iscsi_iface = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.783673] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.iser_use_multipath = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.783839] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_bandwidth = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.784017] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_completion_timeout = 800 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.784186] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_downtime = 500 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.784350] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_downtime_delay = 75 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.784516] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_downtime_steps = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.784680] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_inbound_addr = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.784856] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_permit_auto_converge = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.785023] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_permit_post_copy = False {{(pid=68194) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.785193] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_scheme = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.785368] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_timeout_action = abort {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.785533] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_tunnelled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.785696] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_uri = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.785857] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.live_migration_with_native_tls = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.786028] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.max_queues = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.786196] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.mem_stats_period_seconds = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.786356] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.nfs_mount_options = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.786684] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.nfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.786864] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.num_aoe_discover_tries = 3 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.787042] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.num_iser_scan_tries = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.787210] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.num_memory_encrypted_guests = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.787378] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.num_nvme_discover_tries = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.787544] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.num_pcie_ports = 0 
{{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.787715] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.num_volume_scan_tries = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.787882] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.pmem_namespaces = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.788055] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.quobyte_client_cfg = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.788353] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.quobyte_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.788529] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rbd_connect_timeout = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.788700] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rbd_destroy_volume_retries = 12 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.788866] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rbd_destroy_volume_retry_interval = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.789041] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rbd_secret_uuid = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.789209] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rbd_user = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.789376] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.realtime_scheduler_priority = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.789566] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.remote_filesystem_transport = ssh {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.789747] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rescue_image_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.789914] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rescue_kernel_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.790087] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rescue_ramdisk_id = None {{(pid=68194) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.790263] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rng_dev_path = /dev/urandom {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.790426] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.rx_queue_size = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.790628] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.smbfs_mount_options = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.790933] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.smbfs_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.791126] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.snapshot_compression = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.791295] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.snapshot_image_format = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.791523] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.snapshots_directory = /opt/stack/data/nova/instances/snapshots {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.791689] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.sparse_logical_volumes = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.791856] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.swtpm_enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.792039] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.swtpm_group = tss {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.792217] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.swtpm_user = tss {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.792388] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.sysinfo_serial = unique {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.792550] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.tb_cache_size = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.792712] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.tx_queue_size = None {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.792879] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.uid_maps = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.793058] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.use_virtio_for_bridges = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.793235] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.virt_type = kvm {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.793407] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.volume_clear = zero {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.793572] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.volume_clear_size = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.793742] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.volume_use_multipath = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.793907] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.vzstorage_cache_path = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.794089] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.vzstorage_log_path = /var/log/vstorage/%(cluster_name)s/nova.log.gz {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.794265] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.vzstorage_mount_group = qemu {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.794436] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.vzstorage_mount_opts = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.794615] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.vzstorage_mount_perms = 0770 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.794924] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.vzstorage_mount_point_base = /opt/stack/data/n-cpu-1/mnt {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.795122] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.vzstorage_mount_user = stack {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.795298] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] libvirt.wait_soft_reboot_seconds = 120 {{(pid=68194) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.795477] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.auth_section = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.795655] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.auth_type = password {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.795824] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.795992] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.796175] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.796339] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.connect_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.796503] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.connect_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.796677] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.default_floating_pool = public {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.796845] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.endpoint_override = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.797022] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.extension_sync_interval = 600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.797191] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.http_retries = 3 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.797357] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.797521] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.797681] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.max_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.797854] env[68194]: 
DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.metadata_proxy_shared_secret = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.798027] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.min_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.798203] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.ovs_bridge = br-int {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.798371] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.physnets = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.798545] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.region_name = RegionOne {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.798716] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.service_metadata_proxy = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.798879] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.service_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.799067] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.service_type = network {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.799238] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.799400] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.status_code_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.799588] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.status_code_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.799764] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.799955] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.valid_interfaces = ['internal', 'public'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.800136] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] neutron.version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.800316] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None 
None] notifications.bdms_in_notifications = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.800499] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] notifications.default_level = INFO {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.800717] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] notifications.notification_format = unversioned {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.800896] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] notifications.notify_on_state_change = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.801089] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] notifications.versioned_notifications_topics = ['versioned_notifications'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.801274] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] pci.alias = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.801449] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] pci.device_spec = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.801619] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] pci.report_in_placement = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.801795] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.auth_section = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.801969] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.auth_type = password {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.802153] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.auth_url = http://10.180.1.21/identity {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.802320] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.802493] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.802676] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.802841] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] 
placement.connect_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.803008] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.connect_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.803205] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.default_domain_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.803371] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.default_domain_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.803564] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.domain_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.803749] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.domain_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.803915] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.endpoint_override = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.804095] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.804261] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.804422] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.max_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.804596] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.min_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.804787] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.password = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.804955] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.project_domain_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.805140] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.project_domain_name = Default {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.805312] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.project_id = None {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.805487] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.project_name = service {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.805685] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.region_name = RegionOne {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.805870] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.service_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.806057] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.service_type = placement {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.806229] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.806393] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.status_code_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.806556] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.status_code_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.806720] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.system_scope = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.806879] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.807053] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.trust_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.807218] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.user_domain_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.807390] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.user_domain_name = Default {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.807553] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.user_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.807727] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.username = placement {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
572.807931] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.valid_interfaces = ['internal', 'public'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.808229] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] placement.version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.808545] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.cores = 20 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.808843] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.count_usage_from_placement = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.809163] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.driver = nova.quota.DbQuotaDriver {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.809389] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.injected_file_content_bytes = 10240 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.809598] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.injected_file_path_length = 255 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.809784] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.injected_files = 5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.809963] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.instances = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.810325] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.key_pairs = 100 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.810325] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.metadata_items = 128 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.810493] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.ram = 51200 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.810690] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.recheck_quota = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.810873] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] quota.server_group_members = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.811059] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None 
None] quota.server_groups = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.811240] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] rdp.enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.811569] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] rdp.html5_proxy_base_url = http://127.0.0.1:6083/ {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.811762] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.discover_hosts_in_cells_interval = -1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.811938] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.enable_isolated_aggregate_filtering = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.812122] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.image_metadata_prefilter = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.812293] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.limit_tenants_to_placement_aggregate = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.812466] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.max_attempts = 3 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.812635] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.max_placement_results = 1000 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.812802] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.placement_aggregate_required_for_tenants = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.812969] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.query_placement_for_image_type_support = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.813149] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.query_placement_for_routed_network_aggregates = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.813328] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] scheduler.workers = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.813509] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.aggregate_image_properties_isolation_namespace = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
572.813685] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.aggregate_image_properties_isolation_separator = . {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.813871] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.available_filters = ['nova.scheduler.filters.all_filters'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.814057] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.build_failure_weight_multiplier = 1000000.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.814231] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.cpu_weight_multiplier = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.814401] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.cross_cell_move_weight_multiplier = 1000000.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.814569] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.disk_weight_multiplier = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.814764] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.enabled_filters = ['ComputeFilter', 'ComputeCapabilitiesFilter', 'ImagePropertiesFilter', 'ServerGroupAntiAffinityFilter', 'ServerGroupAffinityFilter', 'SameHostFilter', 'DifferentHostFilter'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.814936] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.host_subset_size = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.815119] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.hypervisor_version_weight_multiplier = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.815285] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.image_properties_default_architecture = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.815454] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.io_ops_weight_multiplier = -1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.815620] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.isolated_hosts = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.815787] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.isolated_images = [] 
{{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.815949] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.max_instances_per_host = 50 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.816127] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.max_io_ops_per_host = 8 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.816299] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.num_instances_weight_multiplier = 0.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.816466] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.pci_in_placement = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.816633] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.pci_weight_multiplier = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.816802] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.ram_weight_multiplier = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.816973] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.restrict_isolated_hosts_to_isolated_images = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.817152] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.shuffle_best_same_weighed_hosts = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.817321] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.soft_affinity_weight_multiplier = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.817489] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.soft_anti_affinity_weight_multiplier = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.817662] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.track_instance_changes = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.817846] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] filter_scheduler.weight_classes = ['nova.scheduler.weights.all_weighers'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.818033] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] metrics.required = True {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.818206] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] metrics.weight_multiplier = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.818375] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] metrics.weight_of_unavailable = -10000.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.818543] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] metrics.weight_setting = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.818846] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] serial_console.base_url = ws://127.0.0.1:6083/ {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.819033] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] serial_console.enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.819216] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] serial_console.port_range = 10000:20000 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.819392] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] serial_console.proxyclient_address = 127.0.0.1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.819578] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] serial_console.serialproxy_host = 0.0.0.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.819769] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] serial_console.serialproxy_port = 6083 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.819945] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.auth_section = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.820136] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.auth_type = password {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.820303] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.820464] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.820659] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.collect_timing = False {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.820833] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.820995] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.821202] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.send_service_user_token = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.821370] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.821530] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] service_user.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.821707] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.agent_enabled = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.821875] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.822196] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.html5proxy_base_url = http://127.0.0.1:6082/spice_auto.html {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.822395] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.html5proxy_host = 0.0.0.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.822572] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.html5proxy_port = 6082 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.822739] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.image_compression = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.822901] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.jpeg_compression = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.823074] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.playback_compression = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.823253] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.server_listen = 127.0.0.1 {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.823426] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.server_proxyclient_address = 127.0.0.1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.823592] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.streaming_mode = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.823758] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] spice.zlib_compression = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.823929] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] upgrade_levels.baseapi = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.824106] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] upgrade_levels.cert = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.824284] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] upgrade_levels.compute = auto {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.824449] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] upgrade_levels.conductor = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.824611] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] upgrade_levels.scheduler = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.824783] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vendordata_dynamic_auth.auth_section = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.824951] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vendordata_dynamic_auth.auth_type = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.825127] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vendordata_dynamic_auth.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.825294] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vendordata_dynamic_auth.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.825459] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vendordata_dynamic_auth.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.825623] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vendordata_dynamic_auth.insecure = False {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.825786] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vendordata_dynamic_auth.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.825952] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vendordata_dynamic_auth.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.826124] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vendordata_dynamic_auth.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.826303] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.api_retry_count = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.826467] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.ca_file = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.826641] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.cache_prefix = devstack-image-cache {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.826809] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.cluster_name = testcl1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.826974] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.connection_pool_size = 10 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.827147] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.console_delay_seconds = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.827319] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.datastore_regex = ^datastore.* {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.827526] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.host_ip = vc1.osci.c.eu-de-1.cloud.sap {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.827701] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.host_password = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.827872] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.host_port = 443 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.828055] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.host_username = administrator@vsphere.local {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.828233] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.insecure = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.828399] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.integration_bridge = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.828564] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.maximum_objects = 100 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.828729] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.pbm_default_policy = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.828893] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.pbm_enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.829075] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.pbm_wsdl_location = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.829252] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.serial_log_dir = /opt/vmware/vspc {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.829413] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.serial_port_proxy_uri = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.829595] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.serial_port_service_uri = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.829767] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.task_poll_interval = 0.5 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.829945] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.use_linked_clone = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.830131] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.vnc_keymap = en-us {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.830304] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.vnc_port = 5900 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.830471] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vmware.vnc_port_total = 10000 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.830681] 
env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.auth_schemes = ['none'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.830877] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.831210] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.novncproxy_base_url = http://127.0.0.1:6080/vnc_auto.html {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.831402] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.novncproxy_host = 0.0.0.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.831580] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.novncproxy_port = 6080 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.831762] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.server_listen = 127.0.0.1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.831937] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.server_proxyclient_address = 127.0.0.1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.832117] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.vencrypt_ca_certs = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.832283] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.vencrypt_client_cert = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.832444] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vnc.vencrypt_client_key = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.832623] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.disable_compute_service_check_for_ffu = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.832792] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.disable_deep_image_inspection = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.832956] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.disable_fallback_pcpu_query = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.833134] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.disable_group_policy_check_upcall = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 
572.833299] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.disable_libvirt_livesnapshot = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.833463] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.disable_rootwrap = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.833625] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.enable_numa_live_migration = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.833789] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.enable_qemu_monitor_announce_self = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.833954] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.ensure_libvirt_rbd_instance_dir_cleanup = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.834132] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.handle_virt_lifecycle_events = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.834297] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.libvirt_disable_apic = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.834458] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.never_download_image_if_on_rbd = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.834626] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.qemu_monitor_announce_self_count = 3 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.834789] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.qemu_monitor_announce_self_interval = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.834950] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.reserve_disk_resource_for_image_cache = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.835125] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.skip_cpu_compare_at_startup = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.835289] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.skip_cpu_compare_on_dest = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.835450] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None 
None] workarounds.skip_hypervisor_version_check_on_lm = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.835613] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.skip_reserve_in_use_ironic_nodes = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.835773] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.unified_limits_count_pcpu_as_vcpu = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.835936] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] workarounds.wait_for_vif_plugged_event_during_hard_reboot = [] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.836137] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.api_paste_config = /etc/nova/api-paste.ini {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.836313] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.client_socket_timeout = 900 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.836484] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.default_pool_size = 1000 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.836654] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.keep_alive = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.836825] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.max_header_line = 16384 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.836989] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.secure_proxy_ssl_header = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.837168] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.ssl_ca_file = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.837330] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.ssl_cert_file = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.837493] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.ssl_key_file = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.837661] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] wsgi.tcp_keepidle = 600 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.837837] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] 
wsgi.wsgi_log_format = %(client_ip)s "%(request_line)s" status: %(status_code)s len: %(body_length)s time: %(wall_seconds).7f {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.838013] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] zvm.ca_file = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.838188] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] zvm.cloud_connector_url = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.838476] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] zvm.image_tmp_path = /opt/stack/data/n-cpu-1/images {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.838662] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] zvm.reachable_timeout = 300 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.838853] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.enforce_new_defaults = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.839041] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.enforce_scope = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.839226] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.policy_default_rule = default {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.839412] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.policy_dirs = ['policy.d'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.839627] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.policy_file = policy.yaml {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.839812] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.remote_content_type = application/x-www-form-urlencoded {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.839980] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.remote_ssl_ca_crt_file = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.840159] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.remote_ssl_client_crt_file = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.840324] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.remote_ssl_client_key_file = None {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.840490] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_policy.remote_ssl_verify_server_crt = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.840689] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_versionedobjects.fatal_exception_format_errors = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.840874] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_middleware.http_basic_auth_user_file = /etc/htpasswd {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.841070] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.connection_string = messaging:// {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.841251] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.enabled = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.841421] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.es_doc_type = notification {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.841588] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.es_scroll_size = 10000 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.841762] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.es_scroll_time = 2m {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.841927] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.filter_error_trace = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.842110] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.hmac_keys = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.842285] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.sentinel_service_name = mymaster {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.842453] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.socket_timeout = 0.1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.842619] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.trace_requests = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.842785] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler.trace_sqlalchemy = False {{(pid=68194) log_opt_values 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.842969] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler_jaeger.process_tags = {} {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.843148] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler_jaeger.service_name_prefix = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.843317] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] profiler_otlp.service_name_prefix = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.843487] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] remote_debug.host = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.843650] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] remote_debug.port = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.843833] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.amqp_auto_delete = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.843996] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.amqp_durable_queues = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.844173] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.conn_pool_min_size = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.844337] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.conn_pool_ttl = 1200 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.844501] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.direct_mandatory_flag = True {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.844666] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.enable_cancel_on_failover = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.844828] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.heartbeat_in_pthread = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.844990] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.heartbeat_rate = 2 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.845167] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] 
oslo_messaging_rabbit.heartbeat_timeout_threshold = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.845328] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.kombu_compression = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.845499] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.kombu_failover_strategy = round-robin {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.845671] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.kombu_missing_consumer_retry_timeout = 60 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.845845] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.kombu_reconnect_delay = 1.0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.846022] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_ha_queues = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.846192] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_interval_max = 30 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.846367] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_login_method = AMQPLAIN {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.846533] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_qos_prefetch_count = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.846698] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_quorum_delivery_limit = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.846865] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_bytes = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.847041] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_quorum_max_memory_length = 0 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.847209] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_quorum_queue = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.847376] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_retry_backoff = 2 
{{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.847541] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_retry_interval = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.847702] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rabbit_transient_queues_ttl = 1800 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.847867] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.rpc_conn_pool_size = 30 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.848046] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.ssl = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.848227] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.ssl_ca_file = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.848402] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.ssl_cert_file = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.848568] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.ssl_enforce_fips_mode = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.848741] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.ssl_key_file = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.848912] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_rabbit.ssl_version = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.849112] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_notifications.driver = ['messagingv2'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.849284] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_notifications.retry = -1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.849468] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_notifications.topics = ['notifications'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.849673] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_messaging_notifications.transport_url = **** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.849858] env[68194]: DEBUG oslo_service.service 
[None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.auth_section = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.850037] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.auth_type = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.850204] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.cafile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.850377] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.certfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.850558] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.collect_timing = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.850737] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.connect_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.850901] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.connect_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.851073] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.endpoint_id = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.851238] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.endpoint_override = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.851445] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.insecure = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.851558] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.keyfile = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.851720] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.max_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.851894] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.min_version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.852085] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.region_name = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.852254] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.service_name = None {{(pid=68194) 
log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.852415] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.service_type = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.852580] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.split_loggers = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.852741] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.status_code_retries = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.852902] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.status_code_retry_delay = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.853072] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.timeout = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.853236] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.valid_interfaces = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.853395] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_limit.version = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.853564] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_reports.file_event_handler = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.853736] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_reports.file_event_handler_interval = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.853895] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] oslo_reports.log_dir = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.854080] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_linux_bridge_privileged.capabilities = [12] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.854247] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_linux_bridge_privileged.group = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.854409] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_linux_bridge_privileged.helper_command = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.854577] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_linux_bridge_privileged.logger_name = 
oslo_privsep.daemon {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.854744] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_linux_bridge_privileged.thread_pool_size = 8 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.854905] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_linux_bridge_privileged.user = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.855087] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_ovs_privileged.capabilities = [12, 1] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.855250] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_ovs_privileged.group = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.855411] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_ovs_privileged.helper_command = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.855576] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_ovs_privileged.logger_name = oslo_privsep.daemon {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.855742] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_ovs_privileged.thread_pool_size = 8 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.855900] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] vif_plug_ovs_privileged.user = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.856082] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_linux_bridge.flat_interface = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.856265] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_linux_bridge.forward_bridge_interface = ['all'] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.856438] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_linux_bridge.iptables_bottom_regex = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.856612] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_linux_bridge.iptables_drop_action = DROP {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.856785] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_linux_bridge.iptables_top_regex = {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.856953] 
env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_linux_bridge.network_device_mtu = 1500 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.857134] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_linux_bridge.use_ipv6 = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.857299] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_linux_bridge.vlan_interface = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.857477] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_ovs.default_qos_type = linux-noop {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.857652] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_ovs.isolate_vif = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.857821] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_ovs.network_device_mtu = 1500 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.857987] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_ovs.ovs_vsctl_timeout = 120 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.858173] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_ovs.ovsdb_connection = tcp:127.0.0.1:6640 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.858346] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_ovs.ovsdb_interface = native {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.858509] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_vif_ovs.per_port_bridge = False {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.858677] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_brick.lock_path = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.858844] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_brick.wait_mpath_device_attempts = 4 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.859014] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] os_brick.wait_mpath_device_interval = 1 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.859193] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] privsep_osbrick.capabilities = [21] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.859354] 
env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] privsep_osbrick.group = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.859515] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] privsep_osbrick.helper_command = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.859709] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] privsep_osbrick.logger_name = os_brick.privileged {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.859885] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] privsep_osbrick.thread_pool_size = 8 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.860060] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] privsep_osbrick.user = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.860238] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] nova_sys_admin.capabilities = [0, 1, 2, 3, 12, 21] {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.860401] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] nova_sys_admin.group = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.860562] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] nova_sys_admin.helper_command = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.860729] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] nova_sys_admin.logger_name = oslo_privsep.daemon {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.860894] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] nova_sys_admin.thread_pool_size = 8 {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.861066] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] nova_sys_admin.user = None {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2609}} [ 572.861202] env[68194]: DEBUG oslo_service.service [None req-53077056-e415-48ed-b1ff-9b0c7f013dce None None] ******************************************************************************** {{(pid=68194) log_opt_values /opt/stack/data/venv/lib/python3.10/site-packages/oslo_config/cfg.py:2613}} [ 572.861620] env[68194]: INFO nova.service [-] Starting compute node (version 0.0.1) [ 572.871433] env[68194]: WARNING nova.virt.vmwareapi.driver [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] The vmwareapi driver is not tested by the OpenStack project nor does it have clear maintainer(s) and thus its quality can not be ensured. It should be considered experimental and may be removed in a future release. 
If you are using the driver in production please let us know via the openstack-discuss mailing list. [ 572.871906] env[68194]: INFO nova.virt.node [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Generated node identity 717076d7-0911-435a-89c8-6f0e41bd02c5 [ 572.872143] env[68194]: INFO nova.virt.node [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Wrote node identity 717076d7-0911-435a-89c8-6f0e41bd02c5 to /opt/stack/data/n-cpu-1/compute_id [ 572.884788] env[68194]: WARNING nova.compute.manager [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Compute nodes ['717076d7-0911-435a-89c8-6f0e41bd02c5'] for host cpu-1 were not found in the database. If this is the first time this service is starting on this host, then you can ignore this warning. [ 572.918613] env[68194]: INFO nova.compute.manager [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Looking for unclaimed instances stuck in BUILDING status for nodes managed by this host [ 572.941462] env[68194]: WARNING nova.compute.manager [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] No compute node record found for host cpu-1. If this is the first time this service is starting on this host, then you can ignore this warning.: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host cpu-1 could not be found. [ 572.941731] env[68194]: DEBUG oslo_concurrency.lockutils [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 572.942094] env[68194]: DEBUG oslo_concurrency.lockutils [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 572.942284] env[68194]: DEBUG oslo_concurrency.lockutils [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 572.942452] env[68194]: DEBUG nova.compute.resource_tracker [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 572.943602] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-727329c1-802d-4411-a253-12d85ef88bbd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.952423] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3860e0-0262-4a6b-8faa-45bf5575239f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.966559] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-91497e60-83cd-4f17-bf12-fff88376211e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 572.973042] env[68194]: DEBUG oslo_vmware.service [-] 
Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-521ae772-b46f-48b1-aaee-3931264f8edb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.002767] env[68194]: DEBUG nova.compute.resource_tracker [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180956MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 573.002923] env[68194]: DEBUG oslo_concurrency.lockutils [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 573.003050] env[68194]: DEBUG oslo_concurrency.lockutils [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 573.015179] env[68194]: WARNING nova.compute.resource_tracker [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] No compute node record for cpu-1:717076d7-0911-435a-89c8-6f0e41bd02c5: nova.exception_Remote.ComputeHostNotFound_Remote: Compute host 717076d7-0911-435a-89c8-6f0e41bd02c5 could not be found. [ 573.028052] env[68194]: INFO nova.compute.resource_tracker [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Compute node record created for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 with uuid: 717076d7-0911-435a-89c8-6f0e41bd02c5 [ 573.110353] env[68194]: DEBUG nova.compute.resource_tracker [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Total usable vcpus: 48, total allocated vcpus: 0 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 573.110542] env[68194]: DEBUG nova.compute.resource_tracker [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=512MB phys_disk=200GB used_disk=0GB total_vcpus=48 used_vcpus=0 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 573.222238] env[68194]: INFO nova.scheduler.client.report [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] [req-df4d853a-113f-4084-a727-c550b1c73923] Created resource provider record via placement API for resource provider with UUID 717076d7-0911-435a-89c8-6f0e41bd02c5 and name domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28. 
[ 573.239338] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-478ccb27-122d-4671-b3ea-0b6095fbfd38 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.247184] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1653d35f-3d70-4509-a83e-b8a5486c5cc1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.276537] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4325526-4fbd-4c12-8971-4a1aecf7b0c7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.283731] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb30b139-efd2-4ba8-97eb-fbd9f773402f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 573.296323] env[68194]: DEBUG nova.compute.provider_tree [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Updating inventory in ProviderTree for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 573.337507] env[68194]: DEBUG nova.scheduler.client.report [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Updated inventory for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 with generation 0 in Placement from set_inventory_for_provider using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:957}} [ 573.337758] env[68194]: DEBUG nova.compute.provider_tree [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Updating resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5 generation from 0 to 1 during operation: update_inventory {{(pid=68194) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 573.337905] env[68194]: DEBUG nova.compute.provider_tree [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Updating inventory in ProviderTree for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 573.383219] env[68194]: DEBUG nova.compute.provider_tree [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Updating resource 
provider 717076d7-0911-435a-89c8-6f0e41bd02c5 generation from 1 to 2 during operation: update_traits {{(pid=68194) _update_generation /opt/stack/nova/nova/compute/provider_tree.py:164}} [ 573.400614] env[68194]: DEBUG nova.compute.resource_tracker [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 573.400816] env[68194]: DEBUG oslo_concurrency.lockutils [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.398s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 573.400983] env[68194]: DEBUG nova.service [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Creating RPC server for service compute {{(pid=68194) start /opt/stack/nova/nova/service.py:182}} [ 573.414836] env[68194]: DEBUG nova.service [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] Join ServiceGroup membership for this service compute {{(pid=68194) start /opt/stack/nova/nova/service.py:199}} [ 573.415046] env[68194]: DEBUG nova.servicegroup.drivers.db [None req-182d3c2a-868a-4da0-b4a5-e2cb5c64f37a None None] DB_Driver: join new ServiceGroup member cpu-1 to the compute group, service = {{(pid=68194) join /opt/stack/nova/nova/servicegroup/drivers/db.py:44}} [ 582.696366] env[68194]: DEBUG dbcounter [-] [68194] Writing DB stats nova_cell1:SELECT=1 {{(pid=68194) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 582.697425] env[68194]: DEBUG dbcounter [-] [68194] Writing DB stats nova_cell0:SELECT=1 {{(pid=68194) stat_writer /opt/stack/data/venv/lib/python3.10/site-packages/dbcounter.py:115}} [ 605.308737] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquiring lock "a19ec81a-454a-41be-9a1b-37fc645b0c21" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 605.309098] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Lock "a19ec81a-454a-41be-9a1b-37fc645b0c21" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 605.326198] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 605.434156] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 605.434156] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 605.434523] env[68194]: INFO nova.compute.claims [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 605.561464] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45d7e316-de37-4868-ae37-b9c72f7505d7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.569891] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5769156a-4451-4c6c-8af3-83ab839d25db {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.603517] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5befaf8e-127d-48ff-a36d-d9a6e05b9d3a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.611937] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1b8af864-73b5-4ab7-a71f-a56823b2b6db {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 605.625823] env[68194]: DEBUG nova.compute.provider_tree [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 605.637519] env[68194]: DEBUG nova.scheduler.client.report [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 605.655289] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.223s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 605.655798] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 605.700129] env[68194]: DEBUG nova.compute.utils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 605.702032] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 605.702379] env[68194]: DEBUG nova.network.neutron [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 605.722489] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 605.814419] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 607.664720] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 607.665049] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 607.665325] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 607.665325] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 607.665489] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 607.665701] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 607.665896] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 607.666073] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 607.666499] env[68194]: DEBUG nova.virt.hardware [None 
req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 607.666683] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 607.666874] env[68194]: DEBUG nova.virt.hardware [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 607.668192] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-716f2b00-5e9a-4334-920b-211f29766c52 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.680752] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83a180d-3d0d-4389-802a-44988a426839 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.700918] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2c9cbf49-8361-4ad1-a552-fb431ea9dc28 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 607.732367] env[68194]: DEBUG nova.policy [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'ae5a0dae03654407964e152296b8e18e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a03e84a9d3f04c9fb3750e5e398a9a60', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 608.259191] env[68194]: DEBUG nova.network.neutron [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Successfully created port: e03b2ae9-ff2c-4e64-98cc-5ce323b15a91 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 610.798797] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquiring lock "f1fe2c36-316b-46e8-86d2-a71f018861f0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 610.798797] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 
tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Lock "f1fe2c36-316b-46e8-86d2-a71f018861f0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 610.822490] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 610.887485] env[68194]: DEBUG nova.network.neutron [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Successfully updated port: e03b2ae9-ff2c-4e64-98cc-5ce323b15a91 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 610.915228] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 610.915539] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 610.919335] env[68194]: INFO nova.compute.claims [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 610.928450] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquiring lock "refresh_cache-a19ec81a-454a-41be-9a1b-37fc645b0c21" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 610.928450] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquired lock "refresh_cache-a19ec81a-454a-41be-9a1b-37fc645b0c21" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 610.928450] env[68194]: DEBUG nova.network.neutron [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 611.045098] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-56fa162e-6a33-4865-a445-6c27e2d07e5c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.054492] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bf2678a-ee7c-46a0-9c0e-ea3bffe40bd8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.090636] env[68194]: DEBUG nova.network.neutron [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 611.093467] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-79b21d11-efc1-4861-b917-f88b833cf8dc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.101725] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c67791fb-8a3d-4459-aabf-29ae62053791 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.116349] env[68194]: DEBUG nova.compute.provider_tree [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 611.126052] env[68194]: DEBUG nova.scheduler.client.report [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 611.148750] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.233s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 611.149390] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 611.189711] env[68194]: DEBUG nova.compute.utils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 611.191025] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 611.191644] env[68194]: DEBUG nova.network.neutron [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 611.202523] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 611.289652] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 611.329180] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 611.329425] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 611.329658] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 611.329861] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 611.330019] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 611.330170] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 611.330376] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 611.330552] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) 
_get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 611.330801] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 611.330888] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 611.331394] env[68194]: DEBUG nova.virt.hardware [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 611.331900] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d47421e-a01f-4b8f-814c-3b357ac7c205 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.342918] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dcc3704-03e4-4b22-bf4e-5811f86dc5a0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.416763] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_power_states {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.434715] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Getting list of instances from cluster (obj){ [ 611.434715] env[68194]: value = "domain-c8" [ 611.434715] env[68194]: _type = "ClusterComputeResource" [ 611.434715] env[68194]: } {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 611.435980] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-747efbe5-0ac5-431c-8f91-5ed027219216 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.447015] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Got total of 0 instances {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 611.447153] env[68194]: WARNING nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] While synchronizing instance power states, found 2 instances in the database and 0 instances on the hypervisor. 
[ 611.447350] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid a19ec81a-454a-41be-9a1b-37fc645b0c21 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 611.447574] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid f1fe2c36-316b-46e8-86d2-a71f018861f0 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 611.447926] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "a19ec81a-454a-41be-9a1b-37fc645b0c21" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 611.448164] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "f1fe2c36-316b-46e8-86d2-a71f018861f0" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 611.448311] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 611.448642] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Getting list of instances from cluster (obj){ [ 611.448642] env[68194]: value = "domain-c8" [ 611.448642] env[68194]: _type = "ClusterComputeResource" [ 611.448642] env[68194]: } {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 611.449614] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67ac4aec-1d25-42e9-b722-30e47d2687ad {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.457904] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Got total of 0 instances {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 611.570452] env[68194]: DEBUG nova.policy [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '85fddc38ad424d54afa96c8e8d2cdb83', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '77cba93137da4779a7e5f8911f94d7fa', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 611.844710] env[68194]: DEBUG nova.network.neutron [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Updating instance_info_cache with network_info: [{"id": "e03b2ae9-ff2c-4e64-98cc-5ce323b15a91", 
"address": "fa:16:3e:26:72:dd", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape03b2ae9-ff", "ovs_interfaceid": "e03b2ae9-ff2c-4e64-98cc-5ce323b15a91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 611.866705] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Releasing lock "refresh_cache-a19ec81a-454a-41be-9a1b-37fc645b0c21" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 611.867040] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Instance network_info: |[{"id": "e03b2ae9-ff2c-4e64-98cc-5ce323b15a91", "address": "fa:16:3e:26:72:dd", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape03b2ae9-ff", "ovs_interfaceid": "e03b2ae9-ff2c-4e64-98cc-5ce323b15a91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 611.867636] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:26:72:dd', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 
'iface_id': 'e03b2ae9-ff2c-4e64-98cc-5ce323b15a91', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 611.881407] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Creating folder: OpenStack. Parent ref: group-v4. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 611.882018] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-239deb51-d168-4056-8988-2aa295d8ce5a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.896453] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Created folder: OpenStack in parent group-v4. [ 611.897764] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Creating folder: Project (a03e84a9d3f04c9fb3750e5e398a9a60). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 611.897764] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3c0fda5b-b037-4e57-a090-f22285365a32 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.910920] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Created folder: Project (a03e84a9d3f04c9fb3750e5e398a9a60) in parent group-v692426. [ 611.915027] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Creating folder: Instances. Parent ref: group-v692427. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 611.915027] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-09dc5fff-643f-40a5-9063-076532be21c3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.923712] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Created folder: Instances in parent group-v692427. [ 611.924050] env[68194]: DEBUG oslo.service.loopingcall [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 611.924276] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 611.924519] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-917ed099-f22d-43ef-8c77-8609282a7e4d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 611.948603] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 611.948603] env[68194]: value = "task-3466730" [ 611.948603] env[68194]: _type = "Task" [ 611.948603] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 611.957574] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466730, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.326788] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 612.326788] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 612.339289] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 612.411946] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 612.412408] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 612.415312] env[68194]: INFO nova.compute.claims [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 612.467568] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466730, 'name': CreateVM_Task, 'duration_secs': 0.35068} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 612.467741] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 612.606051] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98991f37-e080-471c-9684-2077cfc84718 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.615455] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34375c66-0a29-4356-a9f0-7563b3b15321 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.653635] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-510f6b9f-d568-4f70-bffa-7be1cb433144 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.660802] env[68194]: DEBUG nova.network.neutron [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Successfully created port: 5c0016cd-06a2-42d8-a2f7-774690deb1cb {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 612.664950] env[68194]: DEBUG oslo_vmware.service [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fc00139-df0d-457a-aed6-b571aad9a881 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.679425] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4021c348-16db-4cdf-b5a1-239bfcf5b351 {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.697636] env[68194]: DEBUG nova.compute.provider_tree [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 612.700748] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 612.700933] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 612.702528] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 612.702528] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ba8da28d-a6d6-4ad3-9469-e17298b4694a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.706970] env[68194]: DEBUG oslo_vmware.api [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Waiting for the task: (returnval){ [ 612.706970] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52119500-1e75-fd10-ee42-84ac3e7e2be2" [ 612.706970] env[68194]: _type = "Task" [ 612.706970] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 612.715625] env[68194]: DEBUG oslo_vmware.api [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52119500-1e75-fd10-ee42-84ac3e7e2be2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 612.716973] env[68194]: DEBUG nova.scheduler.client.report [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 612.735970] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.323s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 612.736890] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 612.792845] env[68194]: DEBUG nova.compute.utils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 612.794649] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 612.796930] env[68194]: DEBUG nova.network.neutron [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 612.805408] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 612.887921] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 612.923134] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 612.923134] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 612.923273] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 612.923408] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 612.923508] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 612.923650] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 612.923898] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 612.924672] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 
612.924886] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 612.925699] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 612.925699] env[68194]: DEBUG nova.virt.hardware [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 612.926644] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85482869-4853-4e5e-b87c-2c1a2694b570 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 612.935520] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d59f2f8c-0c3d-4896-b993-c0c606e711ab {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.050835] env[68194]: DEBUG nova.policy [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c1868dbe698949eda258250a272225d9', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '85d1287abf7f4a1cb489bef75e83df49', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 613.219275] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 613.219953] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 613.219953] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 613.220101] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 613.220564] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 613.221094] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c6de20b8-2c00-4ddd-8eef-bbf019e28812 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.229565] env[68194]: DEBUG nova.compute.manager [req-f136d99e-bb45-4642-b5a6-3db3721b9e60 req-645bb9d0-b7ab-47d6-97bb-151ca156899c service nova] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Received event network-vif-plugged-e03b2ae9-ff2c-4e64-98cc-5ce323b15a91 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 613.229661] env[68194]: DEBUG oslo_concurrency.lockutils [req-f136d99e-bb45-4642-b5a6-3db3721b9e60 req-645bb9d0-b7ab-47d6-97bb-151ca156899c service nova] Acquiring lock "a19ec81a-454a-41be-9a1b-37fc645b0c21-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 613.231034] env[68194]: DEBUG oslo_concurrency.lockutils [req-f136d99e-bb45-4642-b5a6-3db3721b9e60 req-645bb9d0-b7ab-47d6-97bb-151ca156899c service nova] Lock "a19ec81a-454a-41be-9a1b-37fc645b0c21-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 613.231034] env[68194]: DEBUG oslo_concurrency.lockutils [req-f136d99e-bb45-4642-b5a6-3db3721b9e60 req-645bb9d0-b7ab-47d6-97bb-151ca156899c service nova] Lock "a19ec81a-454a-41be-9a1b-37fc645b0c21-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 613.231034] env[68194]: DEBUG nova.compute.manager [req-f136d99e-bb45-4642-b5a6-3db3721b9e60 req-645bb9d0-b7ab-47d6-97bb-151ca156899c service nova] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] No waiting events found dispatching network-vif-plugged-e03b2ae9-ff2c-4e64-98cc-5ce323b15a91 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 613.231034] env[68194]: WARNING nova.compute.manager [req-f136d99e-bb45-4642-b5a6-3db3721b9e60 req-645bb9d0-b7ab-47d6-97bb-151ca156899c service nova] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Received unexpected event network-vif-plugged-e03b2ae9-ff2c-4e64-98cc-5ce323b15a91 for instance with vm_state building and task_state spawning. 
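The repeated "Acquiring lock ... / Lock ... acquired ... waited 0.000s / ... released ... held 0.000s" entries above are emitted by oslo.concurrency's lockutils wrappers around Nova's cache-refresh and instance-event handlers. A minimal sketch of that locking pattern, assuming oslo.concurrency is installed; the lock names mirror the log lines above and the function bodies are placeholders, not Nova's actual code:

    from oslo_concurrency import lockutils

    @lockutils.synchronized('a19ec81a-454a-41be-9a1b-37fc645b0c21-events')
    def pop_instance_event():
        # Runs while holding an in-process lock; lockutils logs the
        # "acquired ... waited" / "released ... held" durations seen above.
        pass

    def refresh_instance_cache(instance_uuid):
        # Context-manager form, matching the "refresh_cache-<uuid>" locks above.
        with lockutils.lock('refresh_cache-%s' % instance_uuid):
            pass  # rebuild the network info cache for the instance
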
[ 613.246290] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 613.246290] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 613.246873] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77f6ed9f-d94c-4bbb-adec-35c67425aeaa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.258683] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1b1dda18-9712-445e-934f-31c66a90f569 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.267034] env[68194]: DEBUG oslo_vmware.api [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Waiting for the task: (returnval){ [ 613.267034] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52c590a5-00dc-bff9-7dac-4c4c9dd4ba84" [ 613.267034] env[68194]: _type = "Task" [ 613.267034] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 613.274397] env[68194]: DEBUG oslo_vmware.api [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52c590a5-00dc-bff9-7dac-4c4c9dd4ba84, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 613.787480] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 613.787480] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Creating directory with path [datastore1] vmware_temp/0fcafe6c-9c6b-4ec6-82d0-92a3f49d4146/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 613.787480] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-814234de-f08e-4c26-8f54-53d731944134 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.808927] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Created directory with path [datastore1] vmware_temp/0fcafe6c-9c6b-4ec6-82d0-92a3f49d4146/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 613.809260] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Fetch image to [datastore1] vmware_temp/0fcafe6c-9c6b-4ec6-82d0-92a3f49d4146/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 613.809497] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/0fcafe6c-9c6b-4ec6-82d0-92a3f49d4146/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 613.812195] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12663600-b634-4d51-b308-e8f716ca9393 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.822140] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3af7bf8-a089-41fc-8fa5-1965670a03f7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.837355] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daad0a11-d679-4a99-94bb-a1f447f6f749 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.869811] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c813d0af-0f29-4116-965b-f0729d5a0772 {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.877357] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ee685b0a-dea2-4aae-b0c5-6b6e8a4bf1f5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 613.909413] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 613.996109] env[68194]: DEBUG oslo_vmware.rw_handles [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0fcafe6c-9c6b-4ec6-82d0-92a3f49d4146/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 614.080354] env[68194]: DEBUG oslo_vmware.rw_handles [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 614.081515] env[68194]: DEBUG oslo_vmware.rw_handles [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/0fcafe6c-9c6b-4ec6-82d0-92a3f49d4146/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 614.697268] env[68194]: DEBUG nova.network.neutron [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Successfully created port: 4464d53e-c8f8-4dcf-a2dd-16b1282631a4 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 615.618028] env[68194]: DEBUG nova.network.neutron [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Successfully updated port: 5c0016cd-06a2-42d8-a2f7-774690deb1cb {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 615.630643] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquiring lock "refresh_cache-f1fe2c36-316b-46e8-86d2-a71f018861f0" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 615.631094] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquired lock "refresh_cache-f1fe2c36-316b-46e8-86d2-a71f018861f0" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 615.631094] env[68194]: DEBUG nova.network.neutron [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 615.780799] env[68194]: DEBUG nova.network.neutron [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 616.910106] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquiring lock "a390ced6-9e41-46f3-a330-72d745aeab91" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 616.910106] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Lock "a390ced6-9e41-46f3-a330-72d745aeab91" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 616.927082] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 616.965218] env[68194]: DEBUG nova.network.neutron [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Updating instance_info_cache with network_info: [{"id": "5c0016cd-06a2-42d8-a2f7-774690deb1cb", "address": "fa:16:3e:6d:cc:0c", "network": {"id": "24a7ebcb-c80d-4332-b6e7-bb9601dbe402", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-325564658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77cba93137da4779a7e5f8911f94d7fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c0016cd-06", "ovs_interfaceid": "5c0016cd-06a2-42d8-a2f7-774690deb1cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 616.989082] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Releasing lock "refresh_cache-f1fe2c36-316b-46e8-86d2-a71f018861f0" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 616.989280] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 
tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Instance network_info: |[{"id": "5c0016cd-06a2-42d8-a2f7-774690deb1cb", "address": "fa:16:3e:6d:cc:0c", "network": {"id": "24a7ebcb-c80d-4332-b6e7-bb9601dbe402", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-325564658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77cba93137da4779a7e5f8911f94d7fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c0016cd-06", "ovs_interfaceid": "5c0016cd-06a2-42d8-a2f7-774690deb1cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 616.989742] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6d:cc:0c', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '849fc06e-dfc2-470f-8490-034590682ea7', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5c0016cd-06a2-42d8-a2f7-774690deb1cb', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.000762] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Creating folder: Project (77cba93137da4779a7e5f8911f94d7fa). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.001423] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-074c99db-2eb1-49bc-8681-fd49c1188340 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.019525] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Created folder: Project (77cba93137da4779a7e5f8911f94d7fa) in parent group-v692426. [ 617.019739] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Creating folder: Instances. Parent ref: group-v692430. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.021566] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ec6a638f-bc10-4bfc-9811-646f35d7cfbf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.037028] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Created folder: Instances in parent group-v692430. [ 617.037028] env[68194]: DEBUG oslo.service.loopingcall [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 617.037028] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 617.037028] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-64eaa151-0c5b-4bf9-85fc-7f8ecbea5b87 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.057600] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 617.058031] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 617.060329] env[68194]: INFO nova.compute.claims [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.064108] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.064108] env[68194]: value = "task-3466733" [ 617.064108] env[68194]: _type = "Task" [ 617.064108] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.076413] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466733, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.160327] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquiring lock "dcdac63e-b5bb-4e53-ad3f-956a8b928e2c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 617.160327] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Lock "dcdac63e-b5bb-4e53-ad3f-956a8b928e2c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 617.178559] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 617.266132] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 617.267038] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b4a1b2f8-8728-43fc-8258-edca8e73c336 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.281654] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7f53a10-596b-41ec-b4a8-8acb65b530ad {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.324270] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccf09586-7273-46bb-a292-6f2d67b25174 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.335870] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2001ab05-4334-46b6-95dc-c79c466fa212 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.362075] env[68194]: DEBUG nova.compute.provider_tree [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.371739] env[68194]: DEBUG nova.compute.manager [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Received event 
network-changed-e03b2ae9-ff2c-4e64-98cc-5ce323b15a91 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 617.371925] env[68194]: DEBUG nova.compute.manager [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Refreshing instance network info cache due to event network-changed-e03b2ae9-ff2c-4e64-98cc-5ce323b15a91. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 617.372151] env[68194]: DEBUG oslo_concurrency.lockutils [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] Acquiring lock "refresh_cache-a19ec81a-454a-41be-9a1b-37fc645b0c21" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 617.372357] env[68194]: DEBUG oslo_concurrency.lockutils [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] Acquired lock "refresh_cache-a19ec81a-454a-41be-9a1b-37fc645b0c21" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 617.373708] env[68194]: DEBUG nova.network.neutron [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Refreshing network info cache for port e03b2ae9-ff2c-4e64-98cc-5ce323b15a91 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 617.384393] env[68194]: DEBUG nova.scheduler.client.report [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 617.404445] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.346s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 617.404996] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 617.409792] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.144s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 617.411300] env[68194]: INFO nova.compute.claims [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 617.451025] env[68194]: DEBUG nova.compute.utils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 617.452854] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Not allocating networking since 'none' was specified. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 617.466273] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 617.576782] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 617.585561] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466733, 'name': CreateVM_Task, 'duration_secs': 0.36406} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 617.586547] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 617.586865] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 617.587821] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 617.588180] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 617.591095] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-3d938a26-bd05-473b-8555-5ce5c356baaf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.597336] env[68194]: DEBUG oslo_vmware.api [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Waiting for the task: (returnval){ [ 617.597336] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5251187d-aa52-db54-6b7c-f9faa633f360" [ 617.597336] env[68194]: _type = "Task" [ 617.597336] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.606178] env[68194]: DEBUG oslo_vmware.api [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5251187d-aa52-db54-6b7c-f9faa633f360, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.618882] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 617.619172] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 617.619356] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 617.619563] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 617.619687] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 617.619829] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 617.620039] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 617.620196] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 617.620370] env[68194]: DEBUG nova.virt.hardware [None 
req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 617.620528] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 617.620903] env[68194]: DEBUG nova.virt.hardware [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 617.622147] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c1211c93-7bda-40ea-aefc-b5d0a3f2d1b9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.628998] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c2af9b-b391-477f-9b35-fa2dac9ebab5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.634343] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88aba6ed-85fd-4acc-ad46-de41bd1dd213 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.641188] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-be571ae7-7370-4b6a-a5ba-4a3e141f1990 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.652769] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Instance VIF info [] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 617.658481] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Creating folder: Project (ee6134cfaef34b748c0bc083f39eff99). Parent ref: group-v692426. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.659544] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3f7ca5e0-a373-4df3-b480-4391587dde0c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.688357] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-413795ea-6439-4ed8-8505-cf69b58f0bf0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.697626] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d92060fd-b814-4701-923c-23ec5e7de840 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.702135] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Created folder: Project (ee6134cfaef34b748c0bc083f39eff99) in parent group-v692426. [ 617.702135] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Creating folder: Instances. Parent ref: group-v692433. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 617.702407] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32de37d9-480c-4b5e-a471-66edfbded5cb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.716947] env[68194]: DEBUG nova.compute.provider_tree [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 617.718336] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Created folder: Instances in parent group-v692433. [ 617.719940] env[68194]: DEBUG oslo.service.loopingcall [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 617.719940] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 617.719940] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cf186f18-b9f1-45f7-acdd-ec74f9ccb033 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 617.741880] env[68194]: DEBUG nova.scheduler.client.report [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 617.754135] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 617.754135] env[68194]: value = "task-3466736" [ 617.754135] env[68194]: _type = "Task" [ 617.754135] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 617.769054] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466736, 'name': CreateVM_Task} progress is 5%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 617.769054] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.357s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 617.769054] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 617.828029] env[68194]: DEBUG nova.compute.utils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 617.829104] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Not allocating networking since 'none' was specified. 
{{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 617.847110] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 617.976073] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 618.024708] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 618.024708] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 618.024708] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 618.024863] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 618.024863] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 618.024863] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 
618.024863] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 618.024863] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 618.025018] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 618.026330] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 618.026551] env[68194]: DEBUG nova.virt.hardware [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 618.028613] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46bc9fd2-46b6-4c22-af27-ac4e2d132a30 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.046108] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76e59c8f-9d7c-4da1-b573-ddf94fb7cb87 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.071511] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Instance VIF info [] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 618.080627] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Creating folder: Project (234a428ca0a44ccb976d91a9f0b5d7ed). Parent ref: group-v692426. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 618.081026] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7de13b90-d1b4-4b8d-8342-d81de6db4734 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.090739] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Created folder: Project (234a428ca0a44ccb976d91a9f0b5d7ed) in parent group-v692426. [ 618.090941] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Creating folder: Instances. Parent ref: group-v692436. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 618.091392] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-de54b483-b0a1-4980-8b46-d8f444ad2734 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.103430] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Created folder: Instances in parent group-v692436. [ 618.103430] env[68194]: DEBUG oslo.service.loopingcall [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 618.104838] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 618.104838] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5247493b-d896-4516-99b2-2f876c6113b4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.128184] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 618.128184] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.128184] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 618.132423] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 618.132423] env[68194]: value = "task-3466739" [ 618.132423] env[68194]: _type = "Task" [ 618.132423] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.142331] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466739, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.267091] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466736, 'name': CreateVM_Task, 'duration_secs': 0.32307} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.270417] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 618.271069] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 618.271332] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 618.271793] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 618.272402] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8321ee95-9cc7-42e0-ae9d-afbfd48feae1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.280521] env[68194]: DEBUG oslo_vmware.api [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Waiting for the task: (returnval){ [ 618.280521] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]528e9e95-b43c-fb97-3dd6-07d4bb103bf4" [ 618.280521] env[68194]: _type = "Task" [ 618.280521] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.296231] env[68194]: DEBUG oslo_vmware.api [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]528e9e95-b43c-fb97-3dd6-07d4bb103bf4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.645926] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466739, 'name': CreateVM_Task, 'duration_secs': 0.300411} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 618.647476] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 618.648080] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 618.722649] env[68194]: DEBUG nova.network.neutron [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Successfully updated port: 4464d53e-c8f8-4dcf-a2dd-16b1282631a4 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 618.793489] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "refresh_cache-3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 618.793489] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquired lock "refresh_cache-3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 618.793489] env[68194]: DEBUG nova.network.neutron [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 618.804416] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 618.804743] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 618.807020] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquiring lock "[datastore1] 
devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 618.807020] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 618.807020] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 618.807020] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f3092605-507b-4ddc-b5f2-6d2b553fbab8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 618.813516] env[68194]: DEBUG oslo_vmware.api [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Waiting for the task: (returnval){ [ 618.813516] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52a6cff0-ed8a-06c2-6a98-1af1b7a4499a" [ 618.813516] env[68194]: _type = "Task" [ 618.813516] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 618.824577] env[68194]: DEBUG oslo_vmware.api [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52a6cff0-ed8a-06c2-6a98-1af1b7a4499a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 618.949645] env[68194]: DEBUG nova.network.neutron [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Updated VIF entry in instance network info cache for port e03b2ae9-ff2c-4e64-98cc-5ce323b15a91. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 618.950047] env[68194]: DEBUG nova.network.neutron [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Updating instance_info_cache with network_info: [{"id": "e03b2ae9-ff2c-4e64-98cc-5ce323b15a91", "address": "fa:16:3e:26:72:dd", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.195", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape03b2ae9-ff", "ovs_interfaceid": "e03b2ae9-ff2c-4e64-98cc-5ce323b15a91", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 618.952702] env[68194]: DEBUG nova.network.neutron [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 618.961363] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquiring lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 618.961363] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 618.967950] env[68194]: DEBUG oslo_concurrency.lockutils [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] Releasing lock "refresh_cache-a19ec81a-454a-41be-9a1b-37fc645b0c21" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 618.967950] env[68194]: DEBUG nova.compute.manager [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Received event network-vif-plugged-5c0016cd-06a2-42d8-a2f7-774690deb1cb {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 618.967950] env[68194]: DEBUG oslo_concurrency.lockutils [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] Acquiring lock "f1fe2c36-316b-46e8-86d2-a71f018861f0-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 618.967950] env[68194]: DEBUG oslo_concurrency.lockutils [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] Lock "f1fe2c36-316b-46e8-86d2-a71f018861f0-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 618.968262] env[68194]: DEBUG oslo_concurrency.lockutils [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] Lock "f1fe2c36-316b-46e8-86d2-a71f018861f0-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 618.968262] env[68194]: DEBUG nova.compute.manager [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] No waiting events found dispatching network-vif-plugged-5c0016cd-06a2-42d8-a2f7-774690deb1cb {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 618.968262] env[68194]: WARNING nova.compute.manager [req-b48145ef-21eb-4f7e-9f8b-d96c594e1fca req-be7c51d8-e67e-4d9a-99b4-7809646fae50 service nova] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Received unexpected event 
network-vif-plugged-5c0016cd-06a2-42d8-a2f7-774690deb1cb for instance with vm_state building and task_state spawning. [ 618.982151] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 619.055566] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 619.056053] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 619.057612] env[68194]: INFO nova.compute.claims [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 619.275679] env[68194]: DEBUG nova.network.neutron [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Updating instance_info_cache with network_info: [{"id": "4464d53e-c8f8-4dcf-a2dd-16b1282631a4", "address": "fa:16:3e:f4:67:a4", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4464d53e-c8", "ovs_interfaceid": "4464d53e-c8f8-4dcf-a2dd-16b1282631a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 619.278255] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0caf89f0-8bab-4856-9535-c641d8999081 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.287151] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f91f8232-4da2-481d-8314-f40462dae58a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.323126] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7872f827-e8ef-44a4-a346-0257c68e66a4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.325401] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Releasing lock "refresh_cache-3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 619.325765] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Instance network_info: |[{"id": "4464d53e-c8f8-4dcf-a2dd-16b1282631a4", "address": "fa:16:3e:f4:67:a4", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4464d53e-c8", "ovs_interfaceid": "4464d53e-c8f8-4dcf-a2dd-16b1282631a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 619.326273] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f4:67:a4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4464d53e-c8f8-4dcf-a2dd-16b1282631a4', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 619.333503] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Creating folder: Project (85d1287abf7f4a1cb489bef75e83df49). Parent ref: group-v692426. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 619.334816] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7b905460-ab22-4446-9c55-c9f0db35b51f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.344754] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ed1d516-9b24-41d0-ad8d-f4ef857554e7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.349342] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 619.350102] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 619.350102] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 619.361945] env[68194]: DEBUG nova.compute.provider_tree [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 619.364540] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Created folder: Project (85d1287abf7f4a1cb489bef75e83df49) in parent group-v692426. [ 619.364540] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Creating folder: Instances. Parent ref: group-v692439. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 619.364833] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-af231f76-9c3a-4fca-8ea6-fffc9f8ad08d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.373735] env[68194]: DEBUG nova.scheduler.client.report [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 619.377975] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Created folder: Instances in parent group-v692439. [ 619.378223] env[68194]: DEBUG oslo.service.loopingcall [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 619.378584] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 619.378788] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-5fa78c21-3dae-4095-8dfd-f9b05e3d88ee {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.394817] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.339s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 619.395541] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 619.402767] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 619.402767] env[68194]: value = "task-3466742" [ 619.402767] env[68194]: _type = "Task" [ 619.402767] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.411658] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466742, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 619.459386] env[68194]: DEBUG nova.compute.utils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 619.459386] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 619.463286] env[68194]: DEBUG nova.network.neutron [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 619.480708] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 619.567806] env[68194]: DEBUG nova.policy [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '031f99e11248441686f9b10dad811b5f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '294f613acb3b430180f08a45277620fb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 619.582462] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 619.622481] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 619.622714] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 619.622868] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 619.623059] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 619.623208] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 619.623352] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 619.623556] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 619.623707] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 619.623927] 
env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 619.624039] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 619.624210] env[68194]: DEBUG nova.virt.hardware [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 619.625116] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9dda6ac-36f9-4ece-be6c-9735a72c93c6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.634023] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2030e14-6463-4f2c-aa1d-cdfc44bf7939 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 619.917425] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466742, 'name': CreateVM_Task, 'duration_secs': 0.328899} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 619.917742] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 619.919303] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 619.919656] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 619.920051] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 619.920360] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2ced8c90-9ac7-40b8-8c1d-741209fdb007 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
619.927485] env[68194]: DEBUG oslo_vmware.api [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Waiting for the task: (returnval){ [ 619.927485] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52b9c3e4-d883-f44b-d089-c581f5d144a5" [ 619.927485] env[68194]: _type = "Task" [ 619.927485] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 619.939570] env[68194]: DEBUG oslo_vmware.api [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52b9c3e4-d883-f44b-d089-c581f5d144a5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 620.282032] env[68194]: DEBUG nova.network.neutron [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Successfully created port: 47ae3856-ea04-4667-a477-eb28e7e3135b {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 620.444235] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 620.444503] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 620.444716] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 620.705404] env[68194]: DEBUG nova.compute.manager [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Received event network-changed-5c0016cd-06a2-42d8-a2f7-774690deb1cb {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 620.705968] env[68194]: DEBUG nova.compute.manager [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Refreshing instance network info cache due to event network-changed-5c0016cd-06a2-42d8-a2f7-774690deb1cb. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 620.705968] env[68194]: DEBUG oslo_concurrency.lockutils [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] Acquiring lock "refresh_cache-f1fe2c36-316b-46e8-86d2-a71f018861f0" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 620.706073] env[68194]: DEBUG oslo_concurrency.lockutils [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] Acquired lock "refresh_cache-f1fe2c36-316b-46e8-86d2-a71f018861f0" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 620.706280] env[68194]: DEBUG nova.network.neutron [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Refreshing network info cache for port 5c0016cd-06a2-42d8-a2f7-774690deb1cb {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 621.418698] env[68194]: DEBUG nova.network.neutron [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Updated VIF entry in instance network info cache for port 5c0016cd-06a2-42d8-a2f7-774690deb1cb. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 621.418948] env[68194]: DEBUG nova.network.neutron [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Updating instance_info_cache with network_info: [{"id": "5c0016cd-06a2-42d8-a2f7-774690deb1cb", "address": "fa:16:3e:6d:cc:0c", "network": {"id": "24a7ebcb-c80d-4332-b6e7-bb9601dbe402", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationTestJSON-325564658-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "77cba93137da4779a7e5f8911f94d7fa", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "849fc06e-dfc2-470f-8490-034590682ea7", "external-id": "nsx-vlan-transportzone-567", "segmentation_id": 567, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5c0016cd-06", "ovs_interfaceid": "5c0016cd-06a2-42d8-a2f7-774690deb1cb", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 621.436686] env[68194]: DEBUG oslo_concurrency.lockutils [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] Releasing lock "refresh_cache-f1fe2c36-316b-46e8-86d2-a71f018861f0" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 621.436840] env[68194]: DEBUG nova.compute.manager [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Received 
event network-vif-plugged-4464d53e-c8f8-4dcf-a2dd-16b1282631a4 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 621.437036] env[68194]: DEBUG oslo_concurrency.lockutils [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] Acquiring lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 621.439386] env[68194]: DEBUG oslo_concurrency.lockutils [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] Lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 621.439386] env[68194]: DEBUG oslo_concurrency.lockutils [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] Lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.002s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 621.439386] env[68194]: DEBUG nova.compute.manager [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] No waiting events found dispatching network-vif-plugged-4464d53e-c8f8-4dcf-a2dd-16b1282631a4 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 621.439386] env[68194]: WARNING nova.compute.manager [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Received unexpected event network-vif-plugged-4464d53e-c8f8-4dcf-a2dd-16b1282631a4 for instance with vm_state building and task_state spawning. [ 621.439583] env[68194]: DEBUG nova.compute.manager [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Received event network-changed-4464d53e-c8f8-4dcf-a2dd-16b1282631a4 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 621.439707] env[68194]: DEBUG nova.compute.manager [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Refreshing instance network info cache due to event network-changed-4464d53e-c8f8-4dcf-a2dd-16b1282631a4. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 621.439900] env[68194]: DEBUG oslo_concurrency.lockutils [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] Acquiring lock "refresh_cache-3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 621.440045] env[68194]: DEBUG oslo_concurrency.lockutils [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] Acquired lock "refresh_cache-3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 621.440588] env[68194]: DEBUG nova.network.neutron [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Refreshing network info cache for port 4464d53e-c8f8-4dcf-a2dd-16b1282631a4 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 622.643522] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquiring lock "10df4090-9ec0-4876-8925-23e585344a3b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 622.644047] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "10df4090-9ec0-4876-8925-23e585344a3b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 622.645364] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquiring lock "3d27a0be-599b-4bb4-89db-ff79d33047c8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 622.645446] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "3d27a0be-599b-4bb4-89db-ff79d33047c8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 622.658647] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 622.662551] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 622.778750] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 622.779015] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 622.780949] env[68194]: INFO nova.compute.claims [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 622.784832] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 622.842211] env[68194]: DEBUG nova.network.neutron [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Updated VIF entry in instance network info cache for port 4464d53e-c8f8-4dcf-a2dd-16b1282631a4. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 622.842902] env[68194]: DEBUG nova.network.neutron [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Updating instance_info_cache with network_info: [{"id": "4464d53e-c8f8-4dcf-a2dd-16b1282631a4", "address": "fa:16:3e:f4:67:a4", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.92", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4464d53e-c8", "ovs_interfaceid": "4464d53e-c8f8-4dcf-a2dd-16b1282631a4", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 622.850675] env[68194]: DEBUG nova.network.neutron [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Successfully updated port: 47ae3856-ea04-4667-a477-eb28e7e3135b {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 622.880234] env[68194]: DEBUG oslo_concurrency.lockutils [req-c5411f8d-3ad3-4a07-9e48-01aa104dd526 req-c84c6e7d-7a0c-4c63-a3d6-ce499cc006ef service nova] Releasing lock "refresh_cache-3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 622.893314] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquiring lock "refresh_cache-046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 622.893916] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquired lock "refresh_cache-046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 622.894269] env[68194]: DEBUG nova.network.neutron [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 623.047288] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3c3d5eb9-bfd1-4846-ba1f-ab3c526d5b21 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.056864] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6e4948d-e93f-4407-8dce-0f6bd856f617 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.062184] env[68194]: DEBUG nova.network.neutron [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 623.094819] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d8a5e47-2287-461e-8ac5-813876d841b5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.104118] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11dc96b4-0291-4e38-89ff-5f30b5629d2c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.124027] env[68194]: DEBUG nova.compute.provider_tree [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.138659] env[68194]: DEBUG nova.scheduler.client.report [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 623.158840] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.380s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 623.159397] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 623.162293] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.378s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 623.163691] env[68194]: INFO nova.compute.claims [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 623.203035] env[68194]: DEBUG nova.compute.utils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 623.204873] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 623.205374] env[68194]: DEBUG nova.network.neutron [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 623.225282] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 623.319369] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 623.359694] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 623.359940] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 623.360453] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.360453] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 623.360453] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.360662] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 623.360791] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 623.360950] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 623.361128] env[68194]: DEBUG nova.virt.hardware [None 
req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 623.361293] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 623.361472] env[68194]: DEBUG nova.virt.hardware [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.362686] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-733ab762-de7a-4f5d-8cde-fdfd641641e9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.375413] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e221f0c3-0359-4912-806c-159e5b4d4542 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.445054] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8979fedf-2199-4e5d-9e9e-190214de7961 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.460620] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3edd99ce-33c9-423b-82d7-ef81fffb65e9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.501325] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-390df771-aef0-4f14-82b2-23fac165f1c0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.515066] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93b1c7e4-29b3-4dce-bb0a-5d8047d5412a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.539585] env[68194]: DEBUG nova.compute.provider_tree [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 623.554352] env[68194]: DEBUG nova.scheduler.client.report [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 
'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 623.577203] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.413s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 623.577203] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 623.622133] env[68194]: DEBUG nova.compute.utils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 623.626301] env[68194]: DEBUG nova.policy [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '711994a0ef704df5aa0b8263c7746774', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '4ed51d359dab4078a43f2e0f55634aa6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 623.628933] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 623.628933] env[68194]: DEBUG nova.network.neutron [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 623.640090] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 623.721750] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 623.757474] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 623.757734] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 623.758941] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 623.759339] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 623.759466] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 623.759602] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 623.759813] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 623.759971] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 623.760247] env[68194]: DEBUG 
nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 623.760433] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 623.760561] env[68194]: DEBUG nova.virt.hardware [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 623.761785] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02843845-0701-443e-b5a7-49d9835cba8c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.773796] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1150a039-49d8-42fc-ac91-2a591647df9f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 623.867624] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquiring lock "4d692986-413f-4c9b-b5cc-de43d2ca498d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 623.868145] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "4d692986-413f-4c9b-b5cc-de43d2ca498d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 623.885077] env[68194]: DEBUG nova.policy [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9bf3fb5cbb8348cab967ce53ce698d20', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '706e87397b1c456a96b7778373e33d85', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 623.889977] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 
4d692986-413f-4c9b-b5cc-de43d2ca498d] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 623.960540] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 623.961227] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 623.962906] env[68194]: INFO nova.compute.claims [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 624.031095] env[68194]: DEBUG nova.network.neutron [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Updating instance_info_cache with network_info: [{"id": "47ae3856-ea04-4667-a477-eb28e7e3135b", "address": "fa:16:3e:e4:d4:47", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.228", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ae3856-ea", "ovs_interfaceid": "47ae3856-ea04-4667-a477-eb28e7e3135b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 624.047564] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Releasing lock "refresh_cache-046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 624.048402] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 
046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Instance network_info: |[{"id": "47ae3856-ea04-4667-a477-eb28e7e3135b", "address": "fa:16:3e:e4:d4:47", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.228", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ae3856-ea", "ovs_interfaceid": "47ae3856-ea04-4667-a477-eb28e7e3135b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 624.049389] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:e4:d4:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '47ae3856-ea04-4667-a477-eb28e7e3135b', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 624.060770] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Creating folder: Project (294f613acb3b430180f08a45277620fb). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 624.065196] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-006ee089-4983-4e3d-8610-f5aa24ee363d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.076173] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Created folder: Project (294f613acb3b430180f08a45277620fb) in parent group-v692426. [ 624.076513] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Creating folder: Instances. Parent ref: group-v692442. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 624.077235] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f03ac8bc-a0bd-4da5-9222-50086e5ded9a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.086725] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Created folder: Instances in parent group-v692442. [ 624.086956] env[68194]: DEBUG oslo.service.loopingcall [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 624.088420] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 624.088420] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-9c6c9b47-4860-4bde-8e52-2e8dc2c55633 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.109494] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 624.109494] env[68194]: value = "task-3466745" [ 624.109494] env[68194]: _type = "Task" [ 624.109494] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 624.117849] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466745, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.223928] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b102f18c-94cd-48fa-bc20-769e2085eb3c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.231150] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dde525ea-e2a6-4fd3-9269-4b32ed4bb946 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.268298] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7807a4e5-dcf3-463d-be90-b2301b991796 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.277695] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-021a2cd1-6bed-4c4d-95a1-c01d25b1f287 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.294132] env[68194]: DEBUG nova.compute.provider_tree [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 624.306036] env[68194]: DEBUG nova.scheduler.client.report [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 624.324383] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.363s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 624.324897] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 624.364571] env[68194]: DEBUG nova.compute.utils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 624.368613] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 624.369461] env[68194]: DEBUG nova.network.neutron [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 624.382726] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 624.462682] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 624.501770] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 624.502141] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 624.502316] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 624.502556] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 624.502730] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 624.503347] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 624.503476] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 624.503662] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 
tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 624.503828] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 624.504000] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 624.504218] env[68194]: DEBUG nova.virt.hardware [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 624.506479] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70d1b4aa-b9b0-4aaa-a9f9-71bfa58f3c4a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.516451] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bc3f4f50-1a11-4a71-a118-74d2e3ce9a10 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 624.624906] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466745, 'name': CreateVM_Task} progress is 25%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 624.728015] env[68194]: DEBUG nova.policy [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'a625fc822c0c4b069534ffd6defb303d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '1ebb8ddc821344caa159a750cdef07ed', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 624.988634] env[68194]: DEBUG nova.network.neutron [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Successfully created port: 93f8486e-f79a-433e-948b-ff4452c90a20 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 625.128741] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466745, 'name': CreateVM_Task, 'duration_secs': 0.969231} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 625.128741] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 625.128828] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 625.129355] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 625.129444] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 625.129846] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8146050f-1873-430f-bccd-e19404232f14 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 625.134594] env[68194]: DEBUG oslo_vmware.api [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Waiting for the task: (returnval){ [ 625.134594] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]521bc28a-14ea-9f7a-d657-e29daa34b5d3" [ 625.134594] env[68194]: _type = "Task" [ 625.134594] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 625.143530] env[68194]: DEBUG oslo_vmware.api [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]521bc28a-14ea-9f7a-d657-e29daa34b5d3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 625.169098] env[68194]: DEBUG nova.network.neutron [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Successfully created port: a8ce3fb4-c57b-41c9-b02c-b256b2144c15 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 625.200103] env[68194]: DEBUG nova.compute.manager [req-660260da-b916-4a6d-9a24-64f690e0485a req-e3955393-d819-43b5-93c9-77fd236022b6 service nova] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Received event network-vif-plugged-47ae3856-ea04-4667-a477-eb28e7e3135b {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 625.200103] env[68194]: DEBUG oslo_concurrency.lockutils [req-660260da-b916-4a6d-9a24-64f690e0485a req-e3955393-d819-43b5-93c9-77fd236022b6 service nova] Acquiring lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 625.200103] env[68194]: DEBUG oslo_concurrency.lockutils [req-660260da-b916-4a6d-9a24-64f690e0485a req-e3955393-d819-43b5-93c9-77fd236022b6 service nova] Lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 625.200103] env[68194]: DEBUG oslo_concurrency.lockutils [req-660260da-b916-4a6d-9a24-64f690e0485a req-e3955393-d819-43b5-93c9-77fd236022b6 service nova] Lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 625.200425] env[68194]: DEBUG nova.compute.manager [req-660260da-b916-4a6d-9a24-64f690e0485a req-e3955393-d819-43b5-93c9-77fd236022b6 service nova] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] No waiting events found dispatching network-vif-plugged-47ae3856-ea04-4667-a477-eb28e7e3135b {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 625.200653] env[68194]: WARNING nova.compute.manager [req-660260da-b916-4a6d-9a24-64f690e0485a req-e3955393-d819-43b5-93c9-77fd236022b6 service nova] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Received unexpected event network-vif-plugged-47ae3856-ea04-4667-a477-eb28e7e3135b for instance with vm_state building and task_state spawning. 
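
The oslo_concurrency.lockutils records above and below ("Acquiring lock", "Acquired lock", "Acquired external semaphore", then "Releasing lock" on the devstack-image-cache path) show access to the shared image cache being serialized per image id. The following is a minimal, hypothetical sketch of that pattern with oslo.concurrency; the lock name and helper function are illustrative assumptions, not Nova's actual code.

# Minimal sketch (not Nova's actual code) of serializing work on a shared
# image-cache path with a named lock, as the lockutils DEBUG records suggest.
from oslo_concurrency import lockutils

def process_cached_image(datastore, image_id, fetch_if_missing):
    # Lock name mirrors the one seen in the log; it is only illustrative here.
    lock_name = "[%s] devstack-image-cache_base/%s" % (datastore, image_id)
    # lockutils.lock() is a context manager: entering emits the
    # "Acquiring/Acquired lock" DEBUG lines, leaving emits "Releasing lock".
    with lockutils.lock(lock_name):
        # fetch_if_missing is a hypothetical callable that checks the cache
        # and downloads the VMDK only if it is not already present.
        return fetch_if_missing(image_id)
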
[ 625.651274] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 625.651536] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 625.651747] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 625.870432] env[68194]: DEBUG nova.network.neutron [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Successfully created port: 8f0386c7-0b6b-4096-a8aa-c844cd9bf786 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 627.321017] env[68194]: DEBUG nova.network.neutron [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Successfully updated port: 93f8486e-f79a-433e-948b-ff4452c90a20 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 627.335704] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquiring lock "refresh_cache-3d27a0be-599b-4bb4-89db-ff79d33047c8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 627.335926] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquired lock "refresh_cache-3d27a0be-599b-4bb4-89db-ff79d33047c8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 627.336103] env[68194]: DEBUG nova.network.neutron [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 627.517329] env[68194]: DEBUG nova.network.neutron [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 627.979871] env[68194]: DEBUG nova.network.neutron [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Successfully updated port: a8ce3fb4-c57b-41c9-b02c-b256b2144c15 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 627.995497] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquiring lock "refresh_cache-10df4090-9ec0-4876-8925-23e585344a3b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 627.995655] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquired lock "refresh_cache-10df4090-9ec0-4876-8925-23e585344a3b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 627.995820] env[68194]: DEBUG nova.network.neutron [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.157869] env[68194]: DEBUG nova.network.neutron [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.493644] env[68194]: DEBUG nova.network.neutron [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Updating instance_info_cache with network_info: [{"id": "93f8486e-f79a-433e-948b-ff4452c90a20", "address": "fa:16:3e:0a:0f:db", "network": {"id": "bc963e66-9d9c-4684-8233-bc6bee872cfb", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1417635895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed51d359dab4078a43f2e0f55634aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f8486e-f7", "ovs_interfaceid": "93f8486e-f79a-433e-948b-ff4452c90a20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.514806] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Releasing lock "refresh_cache-3d27a0be-599b-4bb4-89db-ff79d33047c8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 628.515144] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Instance network_info: |[{"id": "93f8486e-f79a-433e-948b-ff4452c90a20", "address": "fa:16:3e:0a:0f:db", "network": {"id": "bc963e66-9d9c-4684-8233-bc6bee872cfb", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1417635895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed51d359dab4078a43f2e0f55634aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f8486e-f7", "ovs_interfaceid": "93f8486e-f79a-433e-948b-ff4452c90a20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 628.515588] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0a:0f:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a8140829-5eac-40d8-a10c-eb881f57affc', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '93f8486e-f79a-433e-948b-ff4452c90a20', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 628.527078] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Creating folder: Project (4ed51d359dab4078a43f2e0f55634aa6). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 628.527723] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9f2fbb23-a294-4a93-b26c-d2e519e7b246 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.540503] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Created folder: Project (4ed51d359dab4078a43f2e0f55634aa6) in parent group-v692426. [ 628.540503] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Creating folder: Instances. Parent ref: group-v692445. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 628.540503] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-466921a1-a732-494d-8302-6180069ff751 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.552096] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Created folder: Instances in parent group-v692445. [ 628.552377] env[68194]: DEBUG oslo.service.loopingcall [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.552594] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 628.552860] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c808fc1c-7be7-4431-b652-ed47079ccb1a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.579942] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.579942] env[68194]: value = "task-3466748" [ 628.579942] env[68194]: _type = "Task" [ 628.579942] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.588186] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466748, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 628.686256] env[68194]: DEBUG nova.network.neutron [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Successfully updated port: 8f0386c7-0b6b-4096-a8aa-c844cd9bf786 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 628.705171] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquiring lock "refresh_cache-4d692986-413f-4c9b-b5cc-de43d2ca498d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 628.715888] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquired lock "refresh_cache-4d692986-413f-4c9b-b5cc-de43d2ca498d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 628.715888] env[68194]: DEBUG nova.network.neutron [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 628.803066] env[68194]: DEBUG nova.network.neutron [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Updating instance_info_cache with network_info: [{"id": "a8ce3fb4-c57b-41c9-b02c-b256b2144c15", "address": "fa:16:3e:8a:58:02", "network": {"id": "ac1efe91-b182-4d8f-9e3f-715439237030", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-44548300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "706e87397b1c456a96b7778373e33d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8ce3fb4-c5", "ovs_interfaceid": "a8ce3fb4-c57b-41c9-b02c-b256b2144c15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 628.822210] 
env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Releasing lock "refresh_cache-10df4090-9ec0-4876-8925-23e585344a3b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 628.827166] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Instance network_info: |[{"id": "a8ce3fb4-c57b-41c9-b02c-b256b2144c15", "address": "fa:16:3e:8a:58:02", "network": {"id": "ac1efe91-b182-4d8f-9e3f-715439237030", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-44548300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "706e87397b1c456a96b7778373e33d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8ce3fb4-c5", "ovs_interfaceid": "a8ce3fb4-c57b-41c9-b02c-b256b2144c15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 628.827515] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:8a:58:02', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'b4d548e7-d762-406a-bb2d-dc7168a8ca67', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a8ce3fb4-c57b-41c9-b02c-b256b2144c15', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 628.845235] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Creating folder: Project (706e87397b1c456a96b7778373e33d85). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 628.846295] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-66027a02-5319-41e7-bb53-92840c2db3a0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.859802] env[68194]: DEBUG nova.network.neutron [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 628.877567] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Created folder: Project (706e87397b1c456a96b7778373e33d85) in parent group-v692426. [ 628.877567] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Creating folder: Instances. Parent ref: group-v692448. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 628.877567] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-01a6dd4c-2ac0-4005-81c9-af9a817b9cdc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.888300] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Created folder: Instances in parent group-v692448. [ 628.888712] env[68194]: DEBUG oslo.service.loopingcall [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 628.889343] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 628.889840] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e9167ae8-e3b5-4e1a-9c8b-b85cbee985ad {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 628.920913] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 628.920913] env[68194]: value = "task-3466751" [ 628.920913] env[68194]: _type = "Task" [ 628.920913] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 628.931847] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466751, 'name': CreateVM_Task} progress is 5%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.081883] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquiring lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 629.082234] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 629.095779] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466748, 'name': CreateVM_Task, 'duration_secs': 0.338249} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.096049] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 629.096814] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 629.100020] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 629.100020] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 629.100020] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b4d986ce-8463-42a5-b9b8-e9166f81f04e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.101802] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 629.113367] env[68194]: DEBUG oslo_vmware.api [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Waiting for the task: (returnval){ [ 629.113367] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]524af3d3-50e7-4be4-1422-c4287f4afd15" [ 629.113367] env[68194]: _type = "Task" [ 629.113367] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 629.127079] env[68194]: DEBUG oslo_vmware.api [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]524af3d3-50e7-4be4-1422-c4287f4afd15, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 629.180506] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 629.180815] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 629.182935] env[68194]: INFO nova.compute.claims [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 629.429074] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.429074] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.429074] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 629.429074] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 629.439134] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466751, 'name': CreateVM_Task, 'duration_secs': 0.321121} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 629.441677] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 629.441677] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 629.469334] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.469855] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.469855] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.469855] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.469984] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.470111] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.470272] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.471023] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.471023] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.471023] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 629.471023] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 629.472301] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.473163] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34cc264e-4d0c-4ffa-8b30-2bd5e9622573 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.476693] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.476693] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.476972] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.477344] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.477581] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.477954] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 629.478221] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 629.487701] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fd662ef-a366-481c-a056-3d6b9de3e44d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.521822] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 629.523285] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a524ae0-7775-4aff-88e8-e3906e311f80 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.532340] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c79f1771-1c38-4b92-81bd-5e556c199835 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.548482] env[68194]: DEBUG nova.compute.provider_tree [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 629.558746] env[68194]: DEBUG nova.scheduler.client.report [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 629.578963] env[68194]: DEBUG nova.network.neutron [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Updating instance_info_cache with network_info: [{"id": "8f0386c7-0b6b-4096-a8aa-c844cd9bf786", "address": "fa:16:3e:5c:bf:59", "network": {"id": "5eb01266-94cc-4878-a54b-c76a69be2658", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-251181878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], 
"meta": {"injected": false, "tenant_id": "1ebb8ddc821344caa159a750cdef07ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f0386c7-0b", "ovs_interfaceid": "8f0386c7-0b6b-4096-a8aa-c844cd9bf786", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 629.591193] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.409s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 629.591193] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 629.593139] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.071s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 629.593139] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 629.593139] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 629.594413] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62e0defb-70e5-463d-b963-88d2c02b2a89 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.605945] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75a48230-88ce-4f69-8df2-e138ee1c231a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 629.614666] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Releasing lock "refresh_cache-4d692986-413f-4c9b-b5cc-de43d2ca498d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 629.614968] env[68194]: DEBUG nova.compute.manager 
[None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Instance network_info: |[{"id": "8f0386c7-0b6b-4096-a8aa-c844cd9bf786", "address": "fa:16:3e:5c:bf:59", "network": {"id": "5eb01266-94cc-4878-a54b-c76a69be2658", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-251181878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ebb8ddc821344caa159a750cdef07ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f0386c7-0b", "ovs_interfaceid": "8f0386c7-0b6b-4096-a8aa-c844cd9bf786", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 629.619307] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5c:bf:59', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'a0d2101e-2d93-4310-a242-af2d9ecdaf9b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '8f0386c7-0b6b-4096-a8aa-c844cd9bf786', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 629.628372] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Creating folder: Project (1ebb8ddc821344caa159a750cdef07ed). Parent ref: group-v692426. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 630.190842] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e601c75d-20d3-48ca-b532-b555fb532340 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.195464] env[68194]: DEBUG nova.compute.utils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 630.198028] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ffd370f0-2aa0-44f6-97ea-dafa15bff837 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.204105] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 630.205493] env[68194]: DEBUG nova.network.neutron [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 630.207884] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 630.208476] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.208476] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 630.210311] env[68194]: DEBUG nova.compute.manager [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Received event network-changed-47ae3856-ea04-4667-a477-eb28e7e3135b {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 630.210497] env[68194]: DEBUG nova.compute.manager [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Refreshing instance network info cache due to event network-changed-47ae3856-ea04-4667-a477-eb28e7e3135b. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 630.210696] env[68194]: DEBUG oslo_concurrency.lockutils [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] Acquiring lock "refresh_cache-046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 630.211448] env[68194]: DEBUG oslo_concurrency.lockutils [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] Acquired lock "refresh_cache-046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 630.211448] env[68194]: DEBUG nova.network.neutron [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Refreshing network info cache for port 47ae3856-ea04-4667-a477-eb28e7e3135b {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 630.215802] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 630.216130] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 630.216898] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Created folder: Project (1ebb8ddc821344caa159a750cdef07ed) in parent group-v692426. [ 630.216898] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Creating folder: Instances. Parent ref: group-v692451. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 630.218027] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Start building block device mappings for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 630.223120] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0b77c3b7-1a59-445b-a744-997e30471718 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.223120] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c0202e78-d176-419b-a01e-bc9f82f07ed6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.224265] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5fa8dc5-c7c3-4345-8b4e-24cba7b941cf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.233294] env[68194]: DEBUG oslo_vmware.api [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Waiting for the task: (returnval){ [ 630.233294] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5229aabb-cf59-a235-4369-6af74d5f009e" [ 630.233294] env[68194]: _type = "Task" [ 630.233294] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.259106] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180957MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 630.259106] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 630.259106] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 630.265608] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Created folder: Instances in parent group-v692451. [ 630.265608] env[68194]: DEBUG oslo.service.loopingcall [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 630.265942] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 630.266572] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8b1a6cc8-556b-4d78-9826-baefb622824a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.291547] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 630.291547] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 630.291547] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 630.294678] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 630.294678] env[68194]: value = "task-3466754" [ 630.294678] env[68194]: _type = "Task" [ 630.294678] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.303039] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466754, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 630.331560] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 630.362033] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a19ec81a-454a-41be-9a1b-37fc645b0c21 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.362033] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f1fe2c36-316b-46e8-86d2-a71f018861f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.362191] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.363365] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a390ced6-9e41-46f3-a330-72d745aeab91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.363365] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance dcdac63e-b5bb-4e53-ad3f-956a8b928e2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.363365] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.363365] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.363571] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 10df4090-9ec0-4876-8925-23e585344a3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.363571] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4d692986-413f-4c9b-b5cc-de43d2ca498d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.363571] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 630.363571] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 630.363698] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 630.377352] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 630.377618] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 630.377724] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 630.377893] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 630.379320] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 630.379582] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 630.379943] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 
tempest-MigrationsAdminTest-1330538300-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 630.380191] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 630.380421] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 630.380586] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 630.380813] env[68194]: DEBUG nova.virt.hardware [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 630.383129] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eeb837c4-4e7e-4609-a2d5-eb48a20d55bd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.397052] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6703f9de-b90d-45d0-944e-3ad1dbe1bb48 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.553136] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8208df88-5e9f-4e54-b9fc-f14ee601fc87 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.560839] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7786dfd1-9ace-4ff1-baeb-ca5ba78a422c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.599967] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07029d37-7b8f-4b41-9619-5c49f9494273 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.610062] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f023da7-667c-4ab7-9f2e-fb231183e8f2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.625687] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 630.629415] env[68194]: DEBUG 
nova.policy [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'fcf1e00bda164a5cb70db4ac49c9ac1b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'bdd04362cfb64ccd839fd7bf5ded433c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 630.640019] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 630.660121] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 630.660121] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.399s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 630.804554] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466754, 'name': CreateVM_Task, 'duration_secs': 0.332486} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 630.804733] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 630.805460] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 630.805629] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 630.806508] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 630.806508] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5feae02d-5376-45e9-8813-f1f6220c6b68 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 630.810889] env[68194]: DEBUG oslo_vmware.api [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Waiting for the task: (returnval){ [ 630.810889] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52493c96-7567-30a8-49ae-1b6fb323bab4" [ 630.810889] env[68194]: _type = "Task" [ 630.810889] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 630.820220] env[68194]: DEBUG oslo_vmware.api [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52493c96-7567-30a8-49ae-1b6fb323bab4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 631.324445] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 631.324445] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 631.324445] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 631.728899] env[68194]: DEBUG nova.network.neutron [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Updated VIF entry in instance network info cache for port 47ae3856-ea04-4667-a477-eb28e7e3135b. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 631.729334] env[68194]: DEBUG nova.network.neutron [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Updating instance_info_cache with network_info: [{"id": "47ae3856-ea04-4667-a477-eb28e7e3135b", "address": "fa:16:3e:e4:d4:47", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.228", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap47ae3856-ea", "ovs_interfaceid": "47ae3856-ea04-4667-a477-eb28e7e3135b", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 631.751313] env[68194]: DEBUG oslo_concurrency.lockutils [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] Releasing lock "refresh_cache-046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" {{(pid=68194) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 631.751677] env[68194]: DEBUG nova.compute.manager [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Received event network-vif-plugged-93f8486e-f79a-433e-948b-ff4452c90a20 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 631.751919] env[68194]: DEBUG oslo_concurrency.lockutils [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] Acquiring lock "3d27a0be-599b-4bb4-89db-ff79d33047c8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 631.752126] env[68194]: DEBUG oslo_concurrency.lockutils [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] Lock "3d27a0be-599b-4bb4-89db-ff79d33047c8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 631.752300] env[68194]: DEBUG oslo_concurrency.lockutils [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] Lock "3d27a0be-599b-4bb4-89db-ff79d33047c8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 631.752452] env[68194]: DEBUG nova.compute.manager [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] No waiting events found dispatching network-vif-plugged-93f8486e-f79a-433e-948b-ff4452c90a20 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 631.752624] env[68194]: WARNING nova.compute.manager [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Received unexpected event network-vif-plugged-93f8486e-f79a-433e-948b-ff4452c90a20 for instance with vm_state building and task_state spawning. 
[ 631.752789] env[68194]: DEBUG nova.compute.manager [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Received event network-vif-plugged-a8ce3fb4-c57b-41c9-b02c-b256b2144c15 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 631.752943] env[68194]: DEBUG oslo_concurrency.lockutils [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] Acquiring lock "10df4090-9ec0-4876-8925-23e585344a3b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 631.753679] env[68194]: DEBUG oslo_concurrency.lockutils [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] Lock "10df4090-9ec0-4876-8925-23e585344a3b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 631.754058] env[68194]: DEBUG oslo_concurrency.lockutils [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] Lock "10df4090-9ec0-4876-8925-23e585344a3b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 631.754366] env[68194]: DEBUG nova.compute.manager [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] No waiting events found dispatching network-vif-plugged-a8ce3fb4-c57b-41c9-b02c-b256b2144c15 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 631.754813] env[68194]: WARNING nova.compute.manager [req-953dbb31-d5fe-445f-9298-4083c9bdcb71 req-cf203152-02bb-45b7-9e28-37ccd166164e service nova] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Received unexpected event network-vif-plugged-a8ce3fb4-c57b-41c9-b02c-b256b2144c15 for instance with vm_state building and task_state spawning. [ 632.639161] env[68194]: DEBUG nova.network.neutron [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Successfully created port: 1058060e-6f0d-411b-b51f-f500bd704359 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 633.966177] env[68194]: DEBUG nova.compute.manager [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Received event network-changed-93f8486e-f79a-433e-948b-ff4452c90a20 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 633.966547] env[68194]: DEBUG nova.compute.manager [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Refreshing instance network info cache due to event network-changed-93f8486e-f79a-433e-948b-ff4452c90a20. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 633.966859] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Acquiring lock "refresh_cache-3d27a0be-599b-4bb4-89db-ff79d33047c8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 633.967373] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Acquired lock "refresh_cache-3d27a0be-599b-4bb4-89db-ff79d33047c8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 633.967664] env[68194]: DEBUG nova.network.neutron [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Refreshing network info cache for port 93f8486e-f79a-433e-948b-ff4452c90a20 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.742455] env[68194]: DEBUG nova.network.neutron [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Updated VIF entry in instance network info cache for port 93f8486e-f79a-433e-948b-ff4452c90a20. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 635.742977] env[68194]: DEBUG nova.network.neutron [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Updating instance_info_cache with network_info: [{"id": "93f8486e-f79a-433e-948b-ff4452c90a20", "address": "fa:16:3e:0a:0f:db", "network": {"id": "bc963e66-9d9c-4684-8233-bc6bee872cfb", "bridge": "br-int", "label": "tempest-ImagesNegativeTestJSON-1417635895-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "4ed51d359dab4078a43f2e0f55634aa6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a8140829-5eac-40d8-a10c-eb881f57affc", "external-id": "nsx-vlan-transportzone-517", "segmentation_id": 517, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap93f8486e-f7", "ovs_interfaceid": "93f8486e-f79a-433e-948b-ff4452c90a20", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 635.761124] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Releasing lock "refresh_cache-3d27a0be-599b-4bb4-89db-ff79d33047c8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 635.761124] env[68194]: DEBUG nova.compute.manager [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Received event 
network-changed-a8ce3fb4-c57b-41c9-b02c-b256b2144c15 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 635.761124] env[68194]: DEBUG nova.compute.manager [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Refreshing instance network info cache due to event network-changed-a8ce3fb4-c57b-41c9-b02c-b256b2144c15. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 635.761124] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Acquiring lock "refresh_cache-10df4090-9ec0-4876-8925-23e585344a3b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 635.761258] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Acquired lock "refresh_cache-10df4090-9ec0-4876-8925-23e585344a3b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 635.762304] env[68194]: DEBUG nova.network.neutron [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Refreshing network info cache for port a8ce3fb4-c57b-41c9-b02c-b256b2144c15 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 635.952373] env[68194]: DEBUG nova.network.neutron [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Successfully updated port: 1058060e-6f0d-411b-b51f-f500bd704359 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 635.969654] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquiring lock "refresh_cache-b8aaf064-e8a6-444a-83cd-6a7e02b82f33" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 635.970563] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquired lock "refresh_cache-b8aaf064-e8a6-444a-83cd-6a7e02b82f33" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 635.971019] env[68194]: DEBUG nova.network.neutron [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 636.188152] env[68194]: DEBUG nova.network.neutron [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 636.198987] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquiring lock "e575e1c7-7f35-41de-96e7-0771a4137bf5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 636.200236] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "e575e1c7-7f35-41de-96e7-0771a4137bf5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 637.356292] env[68194]: DEBUG nova.network.neutron [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Updating instance_info_cache with network_info: [{"id": "1058060e-6f0d-411b-b51f-f500bd704359", "address": "fa:16:3e:f6:8c:51", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1058060e-6f", "ovs_interfaceid": "1058060e-6f0d-411b-b51f-f500bd704359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.371875] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Releasing lock "refresh_cache-b8aaf064-e8a6-444a-83cd-6a7e02b82f33" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 637.372215] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Instance network_info: |[{"id": "1058060e-6f0d-411b-b51f-f500bd704359", "address": "fa:16:3e:f6:8c:51", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": 
"192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1058060e-6f", "ovs_interfaceid": "1058060e-6f0d-411b-b51f-f500bd704359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 637.372612] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f6:8c:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1058060e-6f0d-411b-b51f-f500bd704359', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 637.380561] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Creating folder: Project (bdd04362cfb64ccd839fd7bf5ded433c). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 637.381328] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-3af72ff7-5544-4bf2-8a5d-27fcc044ede9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.393688] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Created folder: Project (bdd04362cfb64ccd839fd7bf5ded433c) in parent group-v692426. [ 637.393916] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Creating folder: Instances. Parent ref: group-v692454. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 637.394187] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd5f80eb-19ba-4d8d-88a4-cb72a2c5cff5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.410172] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Created folder: Instances in parent group-v692454. 
[ 637.410454] env[68194]: DEBUG oslo.service.loopingcall [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 637.411031] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 637.411187] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-62d236de-827c-442d-903e-a484555a70b5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 637.435360] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 637.435360] env[68194]: value = "task-3466757" [ 637.435360] env[68194]: _type = "Task" [ 637.435360] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 637.444856] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466757, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 637.484142] env[68194]: DEBUG nova.network.neutron [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Updated VIF entry in instance network info cache for port a8ce3fb4-c57b-41c9-b02c-b256b2144c15. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 637.484142] env[68194]: DEBUG nova.network.neutron [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Updating instance_info_cache with network_info: [{"id": "a8ce3fb4-c57b-41c9-b02c-b256b2144c15", "address": "fa:16:3e:8a:58:02", "network": {"id": "ac1efe91-b182-4d8f-9e3f-715439237030", "bridge": "br-int", "label": "tempest-ImagesOneServerTestJSON-44548300-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "706e87397b1c456a96b7778373e33d85", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "b4d548e7-d762-406a-bb2d-dc7168a8ca67", "external-id": "nsx-vlan-transportzone-796", "segmentation_id": 796, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa8ce3fb4-c5", "ovs_interfaceid": "a8ce3fb4-c57b-41c9-b02c-b256b2144c15", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 637.497496] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Releasing lock "refresh_cache-10df4090-9ec0-4876-8925-23e585344a3b" {{(pid=68194) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 637.497496] env[68194]: DEBUG nova.compute.manager [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Received event network-vif-plugged-8f0386c7-0b6b-4096-a8aa-c844cd9bf786 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 637.497496] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Acquiring lock "4d692986-413f-4c9b-b5cc-de43d2ca498d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 637.497635] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Lock "4d692986-413f-4c9b-b5cc-de43d2ca498d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 637.497785] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Lock "4d692986-413f-4c9b-b5cc-de43d2ca498d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 637.497953] env[68194]: DEBUG nova.compute.manager [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] No waiting events found dispatching network-vif-plugged-8f0386c7-0b6b-4096-a8aa-c844cd9bf786 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 637.498679] env[68194]: WARNING nova.compute.manager [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Received unexpected event network-vif-plugged-8f0386c7-0b6b-4096-a8aa-c844cd9bf786 for instance with vm_state building and task_state spawning. [ 637.498679] env[68194]: DEBUG nova.compute.manager [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Received event network-changed-8f0386c7-0b6b-4096-a8aa-c844cd9bf786 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 637.498679] env[68194]: DEBUG nova.compute.manager [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Refreshing instance network info cache due to event network-changed-8f0386c7-0b6b-4096-a8aa-c844cd9bf786. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 637.498679] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Acquiring lock "refresh_cache-4d692986-413f-4c9b-b5cc-de43d2ca498d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 637.498966] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Acquired lock "refresh_cache-4d692986-413f-4c9b-b5cc-de43d2ca498d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 637.498966] env[68194]: DEBUG nova.network.neutron [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Refreshing network info cache for port 8f0386c7-0b6b-4096-a8aa-c844cd9bf786 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 637.946889] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466757, 'name': CreateVM_Task} progress is 99%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.447260] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466757, 'name': CreateVM_Task, 'duration_secs': 0.553377} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 638.447633] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 638.448164] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 638.448336] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 638.448666] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 638.448930] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c001ee85-3335-46de-b456-ec6496316059 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 638.454863] env[68194]: DEBUG oslo_vmware.api [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Waiting for the task: (returnval){ [ 638.454863] env[68194]: value = 
"session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5277a247-3766-c8a0-aa7d-bd3fbdf1a691" [ 638.454863] env[68194]: _type = "Task" [ 638.454863] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 638.464612] env[68194]: DEBUG oslo_vmware.api [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5277a247-3766-c8a0-aa7d-bd3fbdf1a691, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 638.782306] env[68194]: DEBUG nova.network.neutron [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Updated VIF entry in instance network info cache for port 8f0386c7-0b6b-4096-a8aa-c844cd9bf786. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 638.783081] env[68194]: DEBUG nova.network.neutron [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Updating instance_info_cache with network_info: [{"id": "8f0386c7-0b6b-4096-a8aa-c844cd9bf786", "address": "fa:16:3e:5c:bf:59", "network": {"id": "5eb01266-94cc-4878-a54b-c76a69be2658", "bridge": "br-int", "label": "tempest-FloatingIPsAssociationNegativeTestJSON-251181878-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "1ebb8ddc821344caa159a750cdef07ed", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "a0d2101e-2d93-4310-a242-af2d9ecdaf9b", "external-id": "nsx-vlan-transportzone-121", "segmentation_id": 121, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap8f0386c7-0b", "ovs_interfaceid": "8f0386c7-0b6b-4096-a8aa-c844cd9bf786", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 638.800123] env[68194]: DEBUG oslo_concurrency.lockutils [req-e2b7f489-d2b7-41b7-89b4-5c568081fa53 req-31abd789-08d0-4e09-94a3-26073cbfce0b service nova] Releasing lock "refresh_cache-4d692986-413f-4c9b-b5cc-de43d2ca498d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 638.966099] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 638.966367] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] 
[instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 638.966588] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 639.140365] env[68194]: DEBUG nova.compute.manager [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Received event network-vif-plugged-1058060e-6f0d-411b-b51f-f500bd704359 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 639.140662] env[68194]: DEBUG oslo_concurrency.lockutils [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] Acquiring lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 639.140810] env[68194]: DEBUG oslo_concurrency.lockutils [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] Lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 639.140995] env[68194]: DEBUG oslo_concurrency.lockutils [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] Lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 639.142174] env[68194]: DEBUG nova.compute.manager [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] No waiting events found dispatching network-vif-plugged-1058060e-6f0d-411b-b51f-f500bd704359 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 639.142315] env[68194]: WARNING nova.compute.manager [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Received unexpected event network-vif-plugged-1058060e-6f0d-411b-b51f-f500bd704359 for instance with vm_state building and task_state spawning. 
[ 639.142489] env[68194]: DEBUG nova.compute.manager [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Received event network-changed-1058060e-6f0d-411b-b51f-f500bd704359 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 639.142655] env[68194]: DEBUG nova.compute.manager [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Refreshing instance network info cache due to event network-changed-1058060e-6f0d-411b-b51f-f500bd704359. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 639.142870] env[68194]: DEBUG oslo_concurrency.lockutils [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] Acquiring lock "refresh_cache-b8aaf064-e8a6-444a-83cd-6a7e02b82f33" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 639.143799] env[68194]: DEBUG oslo_concurrency.lockutils [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] Acquired lock "refresh_cache-b8aaf064-e8a6-444a-83cd-6a7e02b82f33" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 639.145752] env[68194]: DEBUG nova.network.neutron [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Refreshing network info cache for port 1058060e-6f0d-411b-b51f-f500bd704359 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 640.266061] env[68194]: DEBUG nova.network.neutron [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Updated VIF entry in instance network info cache for port 1058060e-6f0d-411b-b51f-f500bd704359. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 640.266061] env[68194]: DEBUG nova.network.neutron [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Updating instance_info_cache with network_info: [{"id": "1058060e-6f0d-411b-b51f-f500bd704359", "address": "fa:16:3e:f6:8c:51", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.28", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1058060e-6f", "ovs_interfaceid": "1058060e-6f0d-411b-b51f-f500bd704359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 640.278672] env[68194]: DEBUG oslo_concurrency.lockutils [req-dae78066-f25e-453a-bf33-3794ee194efd req-60a9d110-afff-4134-ad89-71af11067678 service nova] Releasing lock "refresh_cache-b8aaf064-e8a6-444a-83cd-6a7e02b82f33" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 650.483961] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquiring lock "fe79ae03-c408-4d18-914e-e64065998663" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 650.484326] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "fe79ae03-c408-4d18-914e-e64065998663" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 650.532215] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquiring lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 650.532442] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock 
"cef5dc8e-1a5c-4248-9bac-ff25880588ed" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 651.203216] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "2243c245-bbb3-43b7-89a9-fb727d452885" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 651.203455] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2243c245-bbb3-43b7-89a9-fb727d452885" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 651.295145] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f78a9e09-79bc-4abb-bb0b-0524746de721 tempest-InstanceActionsNegativeTestJSON-2041831807 tempest-InstanceActionsNegativeTestJSON-2041831807-project-member] Acquiring lock "6b13d579-06d7-4bd4-a632-0cd978074902" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 651.295378] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f78a9e09-79bc-4abb-bb0b-0524746de721 tempest-InstanceActionsNegativeTestJSON-2041831807 tempest-InstanceActionsNegativeTestJSON-2041831807-project-member] Lock "6b13d579-06d7-4bd4-a632-0cd978074902" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 652.057035] env[68194]: DEBUG oslo_concurrency.lockutils [None req-44baac25-cf7a-40cf-b494-99a8ba377dba tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquiring lock "0156b780-3e46-4283-829c-9439698f3c8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 652.057650] env[68194]: DEBUG oslo_concurrency.lockutils [None req-44baac25-cf7a-40cf-b494-99a8ba377dba tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "0156b780-3e46-4283-829c-9439698f3c8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 652.842728] env[68194]: DEBUG oslo_concurrency.lockutils [None req-74d3b6c7-4953-4721-a0de-6d1f80eb163e tempest-ServersTestFqdnHostnames-541581067 tempest-ServersTestFqdnHostnames-541581067-project-member] Acquiring lock "a5d1581e-4152-47fd-801b-e88f94dd0546" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 652.843028] env[68194]: DEBUG oslo_concurrency.lockutils [None req-74d3b6c7-4953-4721-a0de-6d1f80eb163e tempest-ServersTestFqdnHostnames-541581067 tempest-ServersTestFqdnHostnames-541581067-project-member] Lock "a5d1581e-4152-47fd-801b-e88f94dd0546" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 653.838794] env[68194]: DEBUG oslo_concurrency.lockutils [None req-89c3bd53-1942-43f5-b00e-c11c15c4aefc tempest-VolumesAssistedSnapshotsTest-515215701 tempest-VolumesAssistedSnapshotsTest-515215701-project-member] Acquiring lock "0db07fdf-30c1-4367-999d-d9e8e9763b45" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 653.839155] env[68194]: DEBUG oslo_concurrency.lockutils [None req-89c3bd53-1942-43f5-b00e-c11c15c4aefc tempest-VolumesAssistedSnapshotsTest-515215701 tempest-VolumesAssistedSnapshotsTest-515215701-project-member] Lock "0db07fdf-30c1-4367-999d-d9e8e9763b45" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 654.351150] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0a629d4c-cced-406e-8a43-74942619fdf7 tempest-ServersV294TestFqdnHostnames-1510252864 tempest-ServersV294TestFqdnHostnames-1510252864-project-member] Acquiring lock "0702f4fa-2d01-4be3-abe8-faa32566d65d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 654.351150] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0a629d4c-cced-406e-8a43-74942619fdf7 tempest-ServersV294TestFqdnHostnames-1510252864 tempest-ServersV294TestFqdnHostnames-1510252864-project-member] Lock "0702f4fa-2d01-4be3-abe8-faa32566d65d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 656.049875] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3f489d01-d7f0-4d41-94be-810a4645412d tempest-ServersWithSpecificFlavorTestJSON-1169256422 tempest-ServersWithSpecificFlavorTestJSON-1169256422-project-member] Acquiring lock "c0708080-3a59-4def-b90b-1c5959d317fb" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 656.050175] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3f489d01-d7f0-4d41-94be-810a4645412d tempest-ServersWithSpecificFlavorTestJSON-1169256422 tempest-ServersWithSpecificFlavorTestJSON-1169256422-project-member] Lock "c0708080-3a59-4def-b90b-1c5959d317fb" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 658.952763] env[68194]: DEBUG oslo_concurrency.lockutils 
[None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Acquiring lock "f1235b58-9673-4d54-ad1d-c48d4ff584e2" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 658.953069] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "f1235b58-9673-4d54-ad1d-c48d4ff584e2" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 659.389028] env[68194]: WARNING oslo_vmware.rw_handles [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 659.389028] env[68194]: ERROR oslo_vmware.rw_handles [ 659.389504] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/0fcafe6c-9c6b-4ec6-82d0-92a3f49d4146/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 659.391014] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 659.391137] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/0fcafe6c-9c6b-4ec6-82d0-92a3f49d4146/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/0fcafe6c-9c6b-4ec6-82d0-92a3f49d4146/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 659.391418] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-cdd1bc36-0189-4c6c-aa13-6d4d3dfdf0ea {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.399478] env[68194]: DEBUG oslo_vmware.api [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Waiting for the task: (returnval){ [ 659.399478] env[68194]: value = "task-3466758" [ 659.399478] env[68194]: _type = "Task" [ 659.399478] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.407724] env[68194]: DEBUG oslo_vmware.api [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Task: {'id': task-3466758, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.912381] env[68194]: DEBUG oslo_vmware.exceptions [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 659.912658] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 659.915551] env[68194]: ERROR nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 659.915551] env[68194]: Faults: ['InvalidArgument'] [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Traceback (most recent call last): [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] yield resources [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] self.driver.spawn(context, instance, image_meta, [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] self._fetch_image_if_missing(context, vi) [ 659.915551] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] image_cache(vi, tmp_image_ds_loc) [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] vm_util.copy_virtual_disk( [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] session._wait_for_task(vmdk_copy_task) [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] return self.wait_for_task(task_ref) [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] return evt.wait() [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] result = hub.switch() [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 659.916362] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] return self.greenlet.switch() [ 659.917018] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 659.917018] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] self.f(*self.args, **self.kw) [ 659.917018] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 659.917018] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] raise exceptions.translate_fault(task_info.error) [ 659.917018] 
env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 659.917018] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Faults: ['InvalidArgument'] [ 659.917018] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] [ 659.917018] env[68194]: INFO nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Terminating instance [ 659.917554] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 659.917772] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 659.918042] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d85d171c-fe82-48ca-b4a3-fa9ac3d03fd7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.923022] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 659.923022] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 659.923022] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6739bd80-b21a-4901-9321-26afa54dd2cf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.928686] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 659.928861] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 659.931472] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-dc734302-afa4-4aca-9e39-5816748e52c3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.933847] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 659.934157] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77977809-908b-4d96-a47f-a969d0bd33c2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 659.938706] env[68194]: DEBUG oslo_vmware.api [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Waiting for the task: (returnval){ [ 659.938706] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]521528ab-e0bf-8b33-3ddd-d36f49c56617" [ 659.938706] env[68194]: _type = "Task" [ 659.938706] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 659.947619] env[68194]: DEBUG oslo_vmware.api [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]521528ab-e0bf-8b33-3ddd-d36f49c56617, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 659.998664] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 659.998664] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 659.998664] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Deleting the datastore file [datastore1] a19ec81a-454a-41be-9a1b-37fc645b0c21 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 659.998664] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-217b210c-500c-412e-bb41-028e5f362c14 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.002900] env[68194]: DEBUG oslo_vmware.api [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Waiting for the task: (returnval){ [ 660.002900] env[68194]: value = "task-3466760" [ 660.002900] env[68194]: _type = "Task" [ 660.002900] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 660.010978] env[68194]: DEBUG oslo_vmware.api [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Task: {'id': task-3466760, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 660.449284] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 660.449455] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Creating directory with path [datastore1] vmware_temp/3a332450-5f2a-4258-a4a9-928bb0035f2a/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 660.450061] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc621654-b578-4f91-9832-84a914292750 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.461980] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Created directory with path [datastore1] vmware_temp/3a332450-5f2a-4258-a4a9-928bb0035f2a/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 660.463028] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Fetch image to [datastore1] vmware_temp/3a332450-5f2a-4258-a4a9-928bb0035f2a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 660.463028] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/3a332450-5f2a-4258-a4a9-928bb0035f2a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 660.463389] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ab6ba1c-cc16-4b2c-bf8d-4e823bcf8063 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.470299] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4068833-4bf5-4467-b942-7c74679c4073 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.480897] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ea69388-6cf2-454b-a096-391a0462d889 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.521114] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-030f53ed-ca55-44fd-b972-16f132a36cd9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.528927] env[68194]: DEBUG oslo_vmware.api [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Task: {'id': task-3466760, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.247299} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 660.530601] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 660.530842] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 660.531068] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 660.531663] env[68194]: INFO nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Took 0.61 seconds to destroy the instance on the hypervisor. 
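The CopyVirtualDisk_Task above (task-3466758) fails while being polled, and the driver surfaces it as VimFaultException: "A specified parameter was not correct: fileType", Faults: ['InvalidArgument'], before tearing the instance down. A rough, self-contained sketch of that poll-then-translate-fault loop follows; it is not oslo.vmware's real implementation, and get_task_info plus the exception class are illustrative stand-ins only.

    import time

    class VimFaultException(Exception):
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list

    def wait_for_task(get_task_info, interval=0.5):
        """Poll a vCenter task until success, raising on error (sketch only)."""
        while True:
            info = get_task_info()          # progress is 0% ... 100%
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # Mirrors "raise exceptions.translate_fault(task_info.error)"
                # in the tracebacks logged above.
                raise VimFaultException(info['faults'], info['message'])
            time.sleep(interval)

    # The failure mode recorded above for the disk-copy task:
    def fake_task_info():
        return {'state': 'error', 'faults': ['InvalidArgument'],
                'message': 'A specified parameter was not correct: fileType'}

    try:
        wait_for_task(fake_task_info)
    except VimFaultException as exc:
        print(exc, exc.fault_list)   # -> ... fileType ['InvalidArgument']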
[ 660.533946] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a072f92e-f91c-4b45-9c94-163fb97f55a6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.536717] env[68194]: DEBUG nova.compute.claims [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 660.537016] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 660.537205] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 660.559710] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 660.641287] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3a332450-5f2a-4258-a4a9-928bb0035f2a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 660.705591] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 660.705795] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/3a332450-5f2a-4258-a4a9-928bb0035f2a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 660.982485] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3658c02e-c41a-4eb8-88c1-a5d62891965a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 660.990802] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4caca339-9d3e-495a-8e2a-b7f0aece810d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.021354] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0d6277f6-d7e0-4ede-a945-2a173ec2a4ee {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.029041] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d36a6fe-6037-4156-b0ea-8104a5b49f86 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 661.043826] env[68194]: DEBUG nova.compute.provider_tree [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 661.057104] env[68194]: DEBUG nova.scheduler.client.report [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 661.077578] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.540s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 661.078125] env[68194]: ERROR nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 661.078125] env[68194]: Faults: ['InvalidArgument'] [ 661.078125] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Traceback (most recent call last): [ 661.078125] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 661.078125] env[68194]: ERROR 
nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] self.driver.spawn(context, instance, image_meta, [ 661.078125] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 661.078125] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] self._vmops.spawn(context, instance, image_meta, injected_files, [ 661.078125] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 661.078125] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] self._fetch_image_if_missing(context, vi) [ 661.078125] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 661.078125] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] image_cache(vi, tmp_image_ds_loc) [ 661.078125] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] vm_util.copy_virtual_disk( [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] session._wait_for_task(vmdk_copy_task) [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] return self.wait_for_task(task_ref) [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] return evt.wait() [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] result = hub.switch() [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] return self.greenlet.switch() [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 661.078530] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] self.f(*self.args, **self.kw) [ 661.081262] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 661.081262] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] raise exceptions.translate_fault(task_info.error) [ 661.081262] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 661.081262] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Faults: ['InvalidArgument'] [ 661.081262] env[68194]: ERROR nova.compute.manager [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] [ 661.081262] env[68194]: DEBUG nova.compute.utils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 661.082153] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Build of instance a19ec81a-454a-41be-9a1b-37fc645b0c21 was re-scheduled: A specified parameter was not correct: fileType [ 661.082153] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 661.082598] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 661.082811] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 661.083012] env[68194]: DEBUG nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 661.083236] env[68194]: DEBUG nova.network.neutron [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 661.511648] env[68194]: DEBUG nova.network.neutron [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 661.523774] env[68194]: INFO nova.compute.manager [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] Took 0.44 seconds to deallocate network for instance. [ 661.655021] env[68194]: INFO nova.scheduler.client.report [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Deleted allocations for instance a19ec81a-454a-41be-9a1b-37fc645b0c21 [ 661.690026] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6c222ef2-1f9b-4d9e-a8fd-a29199331e1d tempest-ServerDiagnosticsTest-1204042750 tempest-ServerDiagnosticsTest-1204042750-project-member] Lock "a19ec81a-454a-41be-9a1b-37fc645b0c21" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 56.380s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 661.692873] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "a19ec81a-454a-41be-9a1b-37fc645b0c21" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 50.245s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 661.693091] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: a19ec81a-454a-41be-9a1b-37fc645b0c21] During sync_power_state the instance has a pending task (spawning). Skip. 
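For reference, the inventory the report client logs above for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 implies the following schedulable capacity under the usual Placement calculation, capacity = (total - reserved) * allocation_ratio. The figures are copied from the log; the helper itself is only an illustrative sketch.

    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0},
    }

    def capacity(inv):
        """Usable amount of each resource class the scheduler can allocate."""
        return {rc: (v['total'] - v['reserved']) * v['allocation_ratio']
                for rc, v in inv.items()}

    print(capacity(inventory))
    # {'VCPU': 192.0, 'MEMORY_MB': 196078.0, 'DISK_GB': 400.0}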
[ 661.693272] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "a19ec81a-454a-41be-9a1b-37fc645b0c21" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 661.723171] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 661.808112] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 661.808846] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 661.810011] env[68194]: INFO nova.compute.claims [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 662.200281] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2970cb38-b931-476e-bb51-94f92b554631 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.212581] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f2597e0-404c-42eb-9ccf-63799aeb1489 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.219659] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquiring lock "108001a3-ff36-475b-a7a5-8e0e197c62a8" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 662.219892] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 662.251312] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b60936c2-ffd5-4f63-9a75-37cc78fbe294 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.259474] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7870a026-7267-4067-bcfa-452a097f634d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.273477] env[68194]: DEBUG nova.compute.provider_tree [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 662.282065] env[68194]: DEBUG nova.scheduler.client.report [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 662.296730] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.488s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 662.297218] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 662.332175] env[68194]: DEBUG nova.compute.utils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 662.336429] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Allocating IP information in the background. 
{{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 662.336429] env[68194]: DEBUG nova.network.neutron [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 662.347890] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 662.399206] env[68194]: DEBUG nova.policy [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd01bf277a139439394d9922f8b9d58fd', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '2c189bf551804af889f716efa492085a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 662.423325] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 662.452354] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 662.452749] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 662.452960] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 662.453197] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 662.453388] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 662.453621] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 662.453908] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 662.454173] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 662.454412] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 662.454630] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 662.454848] env[68194]: DEBUG nova.virt.hardware [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 662.455797] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d46c7224-b1e1-41fc-9c0b-a4264816adee {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.464696] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bed519d-bc06-4fb7-9600-e44f2e4545a3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 662.669911] env[68194]: DEBUG nova.network.neutron [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Successfully created port: 66148232-3a04-4912-8f75-998056bb43fd {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 663.279937] env[68194]: DEBUG nova.network.neutron [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Successfully updated port: 66148232-3a04-4912-8f75-998056bb43fd {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 663.296258] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquiring lock "refresh_cache-e575e1c7-7f35-41de-96e7-0771a4137bf5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 663.296410] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquired lock "refresh_cache-e575e1c7-7f35-41de-96e7-0771a4137bf5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 663.296557] env[68194]: DEBUG nova.network.neutron [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Building network info cache for instance {{(pid=68194) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 663.338066] env[68194]: DEBUG nova.network.neutron [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 663.674023] env[68194]: DEBUG nova.network.neutron [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Updating instance_info_cache with network_info: [{"id": "66148232-3a04-4912-8f75-998056bb43fd", "address": "fa:16:3e:1f:ff:a5", "network": {"id": "b737b4d8-3572-471c-a44c-e7f8622f60d8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1688871767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c189bf551804af889f716efa492085a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66148232-3a", "ovs_interfaceid": "66148232-3a04-4912-8f75-998056bb43fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 663.691809] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Releasing lock "refresh_cache-e575e1c7-7f35-41de-96e7-0771a4137bf5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 663.692071] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Instance network_info: |[{"id": "66148232-3a04-4912-8f75-998056bb43fd", "address": "fa:16:3e:1f:ff:a5", "network": {"id": "b737b4d8-3572-471c-a44c-e7f8622f60d8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1688871767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c189bf551804af889f716efa492085a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": 
"nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66148232-3a", "ovs_interfaceid": "66148232-3a04-4912-8f75-998056bb43fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 663.692466] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:1f:ff:a5', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2d859f07-052d-4a69-bdf1-24261a6a6daa', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '66148232-3a04-4912-8f75-998056bb43fd', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 663.704281] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Creating folder: Project (2c189bf551804af889f716efa492085a). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.704563] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e6dddbbc-3da5-466c-8bb8-f20510a6ff99 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.716665] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Created folder: Project (2c189bf551804af889f716efa492085a) in parent group-v692426. [ 663.716665] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Creating folder: Instances. Parent ref: group-v692460. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 663.716859] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e8e1ace7-d458-4e69-828c-59239b12a5d3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.725653] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Created folder: Instances in parent group-v692460. [ 663.725979] env[68194]: DEBUG oslo.service.loopingcall [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 663.726072] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 663.726285] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-2749f13e-ec3a-4cb9-9188-351545559b31 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 663.751317] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 663.751317] env[68194]: value = "task-3466767" [ 663.751317] env[68194]: _type = "Task" [ 663.751317] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 663.761188] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466767, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.262914] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466767, 'name': CreateVM_Task, 'duration_secs': 0.305038} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 664.263551] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 664.264377] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 664.264710] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 664.267644] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 664.268035] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8ff5360d-2f9c-4ba2-a679-7cd9288baad9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 664.274496] env[68194]: DEBUG oslo_vmware.api [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Waiting for the task: (returnval){ [ 664.274496] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52c1d3e5-9561-d9c2-28d7-cc99a5fafc93" [ 664.274496] env[68194]: _type = "Task" [ 664.274496] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 664.284544] env[68194]: DEBUG oslo_vmware.api [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52c1d3e5-9561-d9c2-28d7-cc99a5fafc93, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 664.794480] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 664.794799] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 664.795055] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 664.832285] env[68194]: DEBUG nova.compute.manager [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Received event network-vif-plugged-66148232-3a04-4912-8f75-998056bb43fd {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 664.833831] env[68194]: DEBUG oslo_concurrency.lockutils [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] Acquiring lock "e575e1c7-7f35-41de-96e7-0771a4137bf5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 664.833831] env[68194]: DEBUG oslo_concurrency.lockutils [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] Lock "e575e1c7-7f35-41de-96e7-0771a4137bf5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 664.833831] env[68194]: DEBUG oslo_concurrency.lockutils [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] Lock "e575e1c7-7f35-41de-96e7-0771a4137bf5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 664.833831] env[68194]: DEBUG nova.compute.manager [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] [instance: 
e575e1c7-7f35-41de-96e7-0771a4137bf5] No waiting events found dispatching network-vif-plugged-66148232-3a04-4912-8f75-998056bb43fd {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 664.834282] env[68194]: WARNING nova.compute.manager [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Received unexpected event network-vif-plugged-66148232-3a04-4912-8f75-998056bb43fd for instance with vm_state building and task_state spawning. [ 664.834282] env[68194]: DEBUG nova.compute.manager [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Received event network-changed-66148232-3a04-4912-8f75-998056bb43fd {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 664.834282] env[68194]: DEBUG nova.compute.manager [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Refreshing instance network info cache due to event network-changed-66148232-3a04-4912-8f75-998056bb43fd. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 664.834282] env[68194]: DEBUG oslo_concurrency.lockutils [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] Acquiring lock "refresh_cache-e575e1c7-7f35-41de-96e7-0771a4137bf5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 664.834282] env[68194]: DEBUG oslo_concurrency.lockutils [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] Acquired lock "refresh_cache-e575e1c7-7f35-41de-96e7-0771a4137bf5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 664.834531] env[68194]: DEBUG nova.network.neutron [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Refreshing network info cache for port 66148232-3a04-4912-8f75-998056bb43fd {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 665.494698] env[68194]: DEBUG nova.network.neutron [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Updated VIF entry in instance network info cache for port 66148232-3a04-4912-8f75-998056bb43fd. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 665.496834] env[68194]: DEBUG nova.network.neutron [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Updating instance_info_cache with network_info: [{"id": "66148232-3a04-4912-8f75-998056bb43fd", "address": "fa:16:3e:1f:ff:a5", "network": {"id": "b737b4d8-3572-471c-a44c-e7f8622f60d8", "bridge": "br-int", "label": "tempest-InstanceActionsV221TestJSON-1688871767-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "2c189bf551804af889f716efa492085a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2d859f07-052d-4a69-bdf1-24261a6a6daa", "external-id": "nsx-vlan-transportzone-684", "segmentation_id": 684, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap66148232-3a", "ovs_interfaceid": "66148232-3a04-4912-8f75-998056bb43fd", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 665.506408] env[68194]: DEBUG oslo_concurrency.lockutils [req-49eedc73-ae76-48fe-807b-82a9d70c4273 req-83cbda6a-00dd-4df2-ad31-c2350621607d service nova] Releasing lock "refresh_cache-e575e1c7-7f35-41de-96e7-0771a4137bf5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 673.486796] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b643a34-2c87-40bb-8b75-03c00a0e959f tempest-ServerRescueTestJSON-1260696433 tempest-ServerRescueTestJSON-1260696433-project-member] Acquiring lock "e2e34a7a-d419-4fc8-ae82-ff5874aa23d1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 673.487133] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b643a34-2c87-40bb-8b75-03c00a0e959f tempest-ServerRescueTestJSON-1260696433 tempest-ServerRescueTestJSON-1260696433-project-member] Lock "e2e34a7a-d419-4fc8-ae82-ff5874aa23d1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 673.492707] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1218b793-1eb3-4678-8d4c-bb645b0c9ed2 tempest-AttachInterfacesV270Test-476421757 tempest-AttachInterfacesV270Test-476421757-project-member] Acquiring lock "d9b69c9f-9ad6-4605-a7b5-54eed2035cc0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 673.492919] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1218b793-1eb3-4678-8d4c-bb645b0c9ed2 tempest-AttachInterfacesV270Test-476421757 
tempest-AttachInterfacesV270Test-476421757-project-member] Lock "d9b69c9f-9ad6-4605-a7b5-54eed2035cc0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 675.376503] env[68194]: DEBUG oslo_concurrency.lockutils [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Acquiring lock "afe91255-b954-4518-b0ca-a1f4ddcfd9ba" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 675.376987] env[68194]: DEBUG oslo_concurrency.lockutils [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Lock "afe91255-b954-4518-b0ca-a1f4ddcfd9ba" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 675.413517] env[68194]: DEBUG oslo_concurrency.lockutils [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Acquiring lock "cfa6ea83-c10f-4c87-860a-b26fb80e5f12" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 675.413755] env[68194]: DEBUG oslo_concurrency.lockutils [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Lock "cfa6ea83-c10f-4c87-860a-b26fb80e5f12" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 679.941778] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9cdb4304-f2e0-404e-8659-c4635e91ca7b tempest-ServerGroupTestJSON-627746257 tempest-ServerGroupTestJSON-627746257-project-member] Acquiring lock "edfef42a-a968-4c36-92a4-e608037eab3f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 679.942195] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9cdb4304-f2e0-404e-8659-c4635e91ca7b tempest-ServerGroupTestJSON-627746257 tempest-ServerGroupTestJSON-627746257-project-member] Lock "edfef42a-a968-4c36-92a4-e608037eab3f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 681.098813] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b29f273b-c9cb-40ee-ac4f-1992fd4c4cc6 tempest-AttachInterfacesUnderV243Test-948469958 tempest-AttachInterfacesUnderV243Test-948469958-project-member] Acquiring lock "bcaa671a-38d8-45a5-b772-7d856594f700" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 681.099352] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b29f273b-c9cb-40ee-ac4f-1992fd4c4cc6 tempest-AttachInterfacesUnderV243Test-948469958 tempest-AttachInterfacesUnderV243Test-948469958-project-member] Lock "bcaa671a-38d8-45a5-b772-7d856594f700" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 681.765756] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7d03cc73-1071-425b-91c3-da8f2538cc87 tempest-ServersTestBootFromVolume-1892992509 tempest-ServersTestBootFromVolume-1892992509-project-member] Acquiring lock "faf174e0-4af8-4f58-be5d-2f8915b0d58b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 681.765940] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7d03cc73-1071-425b-91c3-da8f2538cc87 tempest-ServersTestBootFromVolume-1892992509 tempest-ServersTestBootFromVolume-1892992509-project-member] Lock "faf174e0-4af8-4f58-be5d-2f8915b0d58b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 690.644394] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 690.644394] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 690.669192] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 690.669192] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 690.669192] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 690.693272] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.693423] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.693552] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.693675] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.693793] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.693910] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.694039] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.694161] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.694278] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.694422] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 690.694555] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 690.695029] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 690.695197] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 690.695353] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.416021] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.416273] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.416502] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.416573] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 691.416718] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 691.428715] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 691.428935] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 691.429701] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 691.429701] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 691.430417] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e465b120-4ab2-45f1-b35e-ff9cdecf4394 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.439894] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-671bdce2-f412-4a2e-a206-735c5d2a272a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.454402] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b77ce09-9dcc-4c18-a7e5-327e5d1256b8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.461405] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9e646ea-55fb-4eaf-804e-c4af798bb95b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 691.492898] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180931MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 691.493062] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 691.493266] 
env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 691.597080] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f1fe2c36-316b-46e8-86d2-a71f018861f0 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.597254] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.597384] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a390ced6-9e41-46f3-a330-72d745aeab91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.597508] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance dcdac63e-b5bb-4e53-ad3f-956a8b928e2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.597662] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.597791] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.597909] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 10df4090-9ec0-4876-8925-23e585344a3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.598042] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4d692986-413f-4c9b-b5cc-de43d2ca498d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.598176] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.598442] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e575e1c7-7f35-41de-96e7-0771a4137bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 691.624110] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fe79ae03-c408-4d18-914e-e64065998663 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.647100] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.657999] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.667570] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 6b13d579-06d7-4bd4-a632-0cd978074902 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.677300] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 0156b780-3e46-4283-829c-9439698f3c8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.686883] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a5d1581e-4152-47fd-801b-e88f94dd0546 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.695653] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 0db07fdf-30c1-4367-999d-d9e8e9763b45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.705266] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 0702f4fa-2d01-4be3-abe8-faa32566d65d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.717415] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance c0708080-3a59-4def-b90b-1c5959d317fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.727823] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f1235b58-9673-4d54-ad1d-c48d4ff584e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.737648] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.747912] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d9b69c9f-9ad6-4605-a7b5-54eed2035cc0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.759747] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e2e34a7a-d419-4fc8-ae82-ff5874aa23d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.770180] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance afe91255-b954-4518-b0ca-a1f4ddcfd9ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.780805] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cfa6ea83-c10f-4c87-860a-b26fb80e5f12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.790741] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance edfef42a-a968-4c36-92a4-e608037eab3f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.800613] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcaa671a-38d8-45a5-b772-7d856594f700 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.811047] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance faf174e0-4af8-4f58-be5d-2f8915b0d58b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 691.811047] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 691.811047] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 692.131450] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fedd7c55-db0b-4224-ba2b-00c1fa75e627 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.139806] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8606137b-1d98-4f80-9582-430ff94eb4ae {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.170050] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ced3c405-4549-4367-8eca-c41cd9558a22 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.177878] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d8b8bb-9ecf-4daf-afb9-7632b80bd954 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 692.191332] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 692.200421] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 692.214817] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 692.215014] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.722s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 707.630601] env[68194]: WARNING oslo_vmware.rw_handles [None 
req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 707.630601] env[68194]: ERROR oslo_vmware.rw_handles [ 707.630601] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/3a332450-5f2a-4258-a4a9-928bb0035f2a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 707.631772] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 707.632024] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Copying Virtual Disk [datastore1] vmware_temp/3a332450-5f2a-4258-a4a9-928bb0035f2a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/3a332450-5f2a-4258-a4a9-928bb0035f2a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 707.632430] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a38e10bf-c764-49f3-bf9b-adb714567099 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 707.641102] env[68194]: DEBUG oslo_vmware.api [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Waiting for the task: (returnval){ [ 707.641102] env[68194]: value = "task-3466784" [ 707.641102] 
env[68194]: _type = "Task" [ 707.641102] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 707.649298] env[68194]: DEBUG oslo_vmware.api [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Task: {'id': task-3466784, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.151927] env[68194]: DEBUG oslo_vmware.exceptions [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 708.152353] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 708.152968] env[68194]: ERROR nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 708.152968] env[68194]: Faults: ['InvalidArgument'] [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Traceback (most recent call last): [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] yield resources [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] self.driver.spawn(context, instance, image_meta, [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] self._fetch_image_if_missing(context, vi) [ 708.152968] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] 
image_cache(vi, tmp_image_ds_loc) [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] vm_util.copy_virtual_disk( [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] session._wait_for_task(vmdk_copy_task) [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] return self.wait_for_task(task_ref) [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] return evt.wait() [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] result = hub.switch() [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 708.153747] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] return self.greenlet.switch() [ 708.154505] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 708.154505] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] self.f(*self.args, **self.kw) [ 708.154505] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 708.154505] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] raise exceptions.translate_fault(task_info.error) [ 708.154505] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 708.154505] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Faults: ['InvalidArgument'] [ 708.154505] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] [ 708.154505] env[68194]: INFO nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Terminating instance [ 708.154972] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 
tempest-ServersAdmin275Test-1814564948-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 708.155285] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.155564] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f5351cf-041e-468d-86d3-4abf79370c2f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.157979] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 708.158249] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 708.159031] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-08dceffc-65f5-4a2a-8f82-7198d805753d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.165820] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 708.166083] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-77578bec-ae3d-426c-aa05-0a7052196560 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.168205] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.168427] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 708.169376] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0dec8718-830e-4ec7-ad75-f1b40c3a15ce {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.175383] env[68194]: DEBUG oslo_vmware.api [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Waiting for the task: (returnval){ [ 708.175383] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5229b0e1-e622-5487-89dc-8923eb50d1e9" [ 708.175383] env[68194]: _type = "Task" [ 708.175383] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.187399] env[68194]: DEBUG oslo_vmware.api [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5229b0e1-e622-5487-89dc-8923eb50d1e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.248746] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 708.248984] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 708.249223] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Deleting the datastore file [datastore1] f1fe2c36-316b-46e8-86d2-a71f018861f0 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 708.249502] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-028b60e1-4952-4923-81a5-0b35ba95f3d1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.256507] env[68194]: DEBUG oslo_vmware.api [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Waiting for the task: (returnval){ [ 708.256507] env[68194]: value = "task-3466787" [ 708.256507] env[68194]: _type = "Task" [ 708.256507] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 708.264579] env[68194]: DEBUG oslo_vmware.api [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Task: {'id': task-3466787, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 708.686699] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 708.686998] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Creating directory with path [datastore1] vmware_temp/10791afd-9bc4-4c93-9c1f-77ed2e69f55d/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 708.687189] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c7ec9976-e3c0-4c51-bba8-054694213b60 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.699964] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Created directory with path [datastore1] vmware_temp/10791afd-9bc4-4c93-9c1f-77ed2e69f55d/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 708.700199] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Fetch image to [datastore1] vmware_temp/10791afd-9bc4-4c93-9c1f-77ed2e69f55d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 708.700374] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/10791afd-9bc4-4c93-9c1f-77ed2e69f55d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 708.701215] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93ffb504-1b37-45d5-ba7f-7b4129aa76f4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.708693] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-410db0d6-1bee-4a78-b511-740a33cdf66e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.718087] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85189d96-574b-4a7c-ad57-94e3bde466d1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.748865] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6a22653-e307-4623-9412-51c03db90364 {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.755483] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-23c4c3fe-2de1-4766-9992-d6204bb8804e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 708.765062] env[68194]: DEBUG oslo_vmware.api [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Task: {'id': task-3466787, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.118686} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 708.765182] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 708.765311] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 708.765488] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 708.765663] env[68194]: INFO nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Took 0.61 seconds to destroy the instance on the hypervisor. 
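[editor's note] The CopyVirtualDisk_Task failure above surfaces through the polling chain shown in the traceback: vm_util.copy_virtual_disk() hands the task to session._wait_for_task(), oslo_vmware's wait_for_task() blocks while a looping call polls the task, and _poll_task() translates the vCenter fault into a VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']). The following is a minimal sketch of that control flow only, not the real oslo_vmware code; the dict-shaped task info and the local VimFaultException stand-in are illustrative assumptions.

    # Simplified illustration of the poll-and-translate pattern in the traceback above.
    import time

    class VimFaultException(Exception):          # stand-in, not oslo_vmware.exceptions
        def __init__(self, fault_list, message):
            super().__init__(message)
            self.fault_list = fault_list         # e.g. ['InvalidArgument']

    def poll_task(get_task_info, interval=0.5):
        """Poll a vCenter-style task until it succeeds or errors (simplified)."""
        while True:
            info = get_task_info()               # one property-collector round trip
            if info["state"] == "success":
                return info.get("result")
            if info["state"] == "error":
                # translate_fault() equivalent: wrap the SOAP fault in an exception
                raise VimFaultException(info["faults"], info["message"])
            time.sleep(interval)                 # task still queued or running

    # Example shaped like task-3466784 above: one running poll, then an error.
    states = iter([
        {"state": "running"},
        {"state": "error", "faults": ["InvalidArgument"],
         "message": "A specified parameter was not correct: fileType"},
    ])
    try:
        poll_task(lambda: next(states), interval=0)
    except VimFaultException as exc:
        print(exc, exc.fault_list)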
[ 708.767757] env[68194]: DEBUG nova.compute.claims [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 708.767928] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 708.768175] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 708.783815] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 708.842466] env[68194]: DEBUG oslo_vmware.rw_handles [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/10791afd-9bc4-4c93-9c1f-77ed2e69f55d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 708.904871] env[68194]: DEBUG oslo_vmware.rw_handles [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 708.904871] env[68194]: DEBUG oslo_vmware.rw_handles [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/10791afd-9bc4-4c93-9c1f-77ed2e69f55d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 709.202284] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66f1da34-0504-463f-b31d-7445d795b297 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.211627] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6a162386-5ab4-490d-8a6c-406574a42bfd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.240855] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf449b92-2e54-4d58-b31f-6ca836ad3bde {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.248704] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd53e5f2-e094-4d5f-9511-36512754ac18 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 709.262510] env[68194]: DEBUG nova.compute.provider_tree [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 709.271318] env[68194]: DEBUG nova.scheduler.client.report [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 709.290132] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.522s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 709.290677] env[68194]: ERROR nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 709.290677] env[68194]: Faults: ['InvalidArgument'] [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Traceback (most recent call last): [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in 
_build_and_run_instance [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] self.driver.spawn(context, instance, image_meta, [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] self._vmops.spawn(context, instance, image_meta, injected_files, [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] self._fetch_image_if_missing(context, vi) [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] image_cache(vi, tmp_image_ds_loc) [ 709.290677] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] vm_util.copy_virtual_disk( [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] session._wait_for_task(vmdk_copy_task) [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] return self.wait_for_task(task_ref) [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] return evt.wait() [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] result = hub.switch() [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] return self.greenlet.switch() [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 709.291137] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] self.f(*self.args, **self.kw) [ 709.291730] env[68194]: ERROR nova.compute.manager [instance: 
f1fe2c36-316b-46e8-86d2-a71f018861f0] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 709.291730] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] raise exceptions.translate_fault(task_info.error) [ 709.291730] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 709.291730] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Faults: ['InvalidArgument'] [ 709.291730] env[68194]: ERROR nova.compute.manager [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] [ 709.291730] env[68194]: DEBUG nova.compute.utils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 709.293172] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Build of instance f1fe2c36-316b-46e8-86d2-a71f018861f0 was re-scheduled: A specified parameter was not correct: fileType [ 709.293172] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 709.293547] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 709.293719] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 709.293918] env[68194]: DEBUG nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 709.294068] env[68194]: DEBUG nova.network.neutron [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 709.616843] env[68194]: DEBUG nova.network.neutron [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 709.628019] env[68194]: INFO nova.compute.manager [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] Took 0.33 seconds to deallocate network for instance. [ 709.729524] env[68194]: INFO nova.scheduler.client.report [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Deleted allocations for instance f1fe2c36-316b-46e8-86d2-a71f018861f0 [ 709.762679] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e87c00e5-fe51-4977-b1f7-ce81cba38192 tempest-FloatingIPsAssociationTestJSON-1694522271 tempest-FloatingIPsAssociationTestJSON-1694522271-project-member] Lock "f1fe2c36-316b-46e8-86d2-a71f018861f0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 98.967s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 709.763853] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "f1fe2c36-316b-46e8-86d2-a71f018861f0" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 98.316s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 709.764125] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: f1fe2c36-316b-46e8-86d2-a71f018861f0] During sync_power_state the instance has a pending task (spawning). Skip. 
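[editor's note] The re-schedule above is easiest to follow by pulling out every line that carries the same request id or instance UUID, since the build, the fault, the network deallocation and the allocation cleanup are interleaved with unrelated requests. A small sketch of that kind of log indexing follows; the file name "n-cpu.log" is a placeholder for wherever this log is stored, and the regexes simply match the "req-<uuid>" and "[instance: <uuid>]" markers visible in the entries above.

    # Group log lines by request id and by instance UUID (sketch, placeholder path).
    import re
    from collections import defaultdict

    REQ_RE = re.compile(r"req-[0-9a-f-]{36}")
    INST_RE = re.compile(r"\[instance: ([0-9a-f-]{36})\]")

    def index_log(path):
        """Return ({instance_uuid: [lines]}, {request_id: [lines]})."""
        by_instance = defaultdict(list)
        by_request = defaultdict(list)
        with open(path, errors="replace") as fh:
            for line in fh:
                m = INST_RE.search(line)
                if m:
                    by_instance[m.group(1)].append(line.rstrip())
                r = REQ_RE.search(line)
                if r:
                    by_request[r.group(0)].append(line.rstrip())
        return by_instance, by_request

    if __name__ == "__main__":
        insts, reqs = index_log("n-cpu.log")     # placeholder file name
        for line in insts.get("f1fe2c36-316b-46e8-86d2-a71f018861f0", []):
            if "ERROR" in line or "Faults" in line:
                print(line)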
[ 709.764253] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "f1fe2c36-316b-46e8-86d2-a71f018861f0" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 709.806389] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 709.892450] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 709.892514] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 709.894617] env[68194]: INFO nova.compute.claims [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 710.277130] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba33690a-c218-45c8-99ea-3852b03d4a21 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.287342] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85929b58-f03b-4191-843f-1448a624dbb0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.318154] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c55ea7f-a3b5-4baf-83f9-24634d82f758 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.326171] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3367d067-4f93-4e85-85fe-4e32681f4da7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.340097] env[68194]: DEBUG nova.compute.provider_tree [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 710.348333] env[68194]: DEBUG nova.scheduler.client.report [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 
tempest-ServersNegativeTestJSON-1254573690-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 710.367659] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.475s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 710.368202] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 710.405111] env[68194]: DEBUG nova.compute.utils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 710.410019] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 710.410019] env[68194]: DEBUG nova.network.neutron [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 710.417923] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Start building block device mappings for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 710.466773] env[68194]: DEBUG nova.policy [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9f511774d6ef48ba903de900c0837ff2', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '20264dcede3f41f1be3c200575c72cfb', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 710.489996] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 710.518691] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 710.518916] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 710.519081] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 710.519916] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 710.519916] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 710.519916] env[68194]: DEBUG 
nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 710.519916] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 710.519916] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 710.520176] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 710.520272] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 710.520580] env[68194]: DEBUG nova.virt.hardware [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 710.521440] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-33000500-b0ab-4ed5-8c4d-b5c18039b487 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.530255] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-468b3fe5-20d2-469a-aa86-ef0bef9d40cb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 710.854276] env[68194]: DEBUG nova.network.neutron [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Successfully created port: 89bf8788-bb23-4791-af6a-bbc4a5506174 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 711.044682] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Acquiring lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 711.044894] env[68194]: 
DEBUG oslo_concurrency.lockutils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 711.482841] env[68194]: DEBUG nova.network.neutron [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Successfully updated port: 89bf8788-bb23-4791-af6a-bbc4a5506174 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 711.493437] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquiring lock "refresh_cache-fe79ae03-c408-4d18-914e-e64065998663" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 711.493674] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquired lock "refresh_cache-fe79ae03-c408-4d18-914e-e64065998663" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 711.494031] env[68194]: DEBUG nova.network.neutron [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 711.537689] env[68194]: DEBUG nova.network.neutron [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 711.716009] env[68194]: DEBUG nova.network.neutron [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Updating instance_info_cache with network_info: [{"id": "89bf8788-bb23-4791-af6a-bbc4a5506174", "address": "fa:16:3e:85:49:15", "network": {"id": "35d48270-2ece-40e3-8f54-fb270f395681", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1311683017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20264dcede3f41f1be3c200575c72cfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89bf8788-bb", "ovs_interfaceid": "89bf8788-bb23-4791-af6a-bbc4a5506174", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 711.728101] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Releasing lock "refresh_cache-fe79ae03-c408-4d18-914e-e64065998663" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 711.728419] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Instance network_info: |[{"id": "89bf8788-bb23-4791-af6a-bbc4a5506174", "address": "fa:16:3e:85:49:15", "network": {"id": "35d48270-2ece-40e3-8f54-fb270f395681", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1311683017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20264dcede3f41f1be3c200575c72cfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89bf8788-bb", "ovs_interfaceid": "89bf8788-bb23-4791-af6a-bbc4a5506174", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 711.728800] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:85:49:15', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ee20e439-fed9-490e-97dd-f3c886977ae1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '89bf8788-bb23-4791-af6a-bbc4a5506174', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 711.736410] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Creating folder: Project (20264dcede3f41f1be3c200575c72cfb). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 711.736963] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-63b75b05-2bdf-41f6-b279-ba8e9efb34d6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.750040] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Created folder: Project (20264dcede3f41f1be3c200575c72cfb) in parent group-v692426. [ 711.750272] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Creating folder: Instances. Parent ref: group-v692468. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 711.750551] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5e33ada9-844e-48af-8535-6de15a5c6d22 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.762534] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Created folder: Instances in parent group-v692468. [ 711.762835] env[68194]: DEBUG oslo.service.loopingcall [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 711.763034] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe79ae03-c408-4d18-914e-e64065998663] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 711.763291] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ea4beece-73a7-4d2b-be6f-6cfd96576b0c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 711.785898] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 711.785898] env[68194]: value = "task-3466790" [ 711.785898] env[68194]: _type = "Task" [ 711.785898] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 711.795381] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466790, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.094546] env[68194]: DEBUG nova.compute.manager [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] [instance: fe79ae03-c408-4d18-914e-e64065998663] Received event network-vif-plugged-89bf8788-bb23-4791-af6a-bbc4a5506174 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 712.094836] env[68194]: DEBUG oslo_concurrency.lockutils [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] Acquiring lock "fe79ae03-c408-4d18-914e-e64065998663-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 712.095028] env[68194]: DEBUG oslo_concurrency.lockutils [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] Lock "fe79ae03-c408-4d18-914e-e64065998663-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 712.095224] env[68194]: DEBUG oslo_concurrency.lockutils [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] Lock "fe79ae03-c408-4d18-914e-e64065998663-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.<locals>._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 712.095422] env[68194]: DEBUG nova.compute.manager [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] [instance: fe79ae03-c408-4d18-914e-e64065998663] No waiting events found dispatching network-vif-plugged-89bf8788-bb23-4791-af6a-bbc4a5506174 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 712.095684] env[68194]: WARNING nova.compute.manager [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] [instance: fe79ae03-c408-4d18-914e-e64065998663] Received unexpected event network-vif-plugged-89bf8788-bb23-4791-af6a-bbc4a5506174 for instance with vm_state building and task_state spawning.
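
The CreateVM_Task exchange logged above (Invoking Folder.CreateVM_Task, then polling the returned task until it completes) follows the standard oslo.vmware calling pattern. The following is a minimal illustrative sketch of that pattern, not Nova's actual implementation; the helper name and the managed-object arguments (vm_folder, config_spec, res_pool) are assumptions for the example.

# Illustrative sketch of the oslo.vmware call pattern seen in the log above.
# Not Nova's code; argument names here are assumptions for the example.
from oslo_vmware import api


def create_vm(session: api.VMwareAPISession, vm_folder, config_spec, res_pool):
    # invoke_api() issues the SOAP request (the "Invoking Folder.CreateVM_Task
    # with opID=..." line) and returns a reference to the vCenter task.
    task = session.invoke_api(session.vim, 'CreateVM_Task', vm_folder,
                              config=config_spec, pool=res_pool)
    # wait_for_task() polls the task (the "progress is 0%." lines) until it
    # finishes, returning the task info on success or raising a translated
    # exception (e.g. VimFaultException) if the task reports an error.
    return session.wait_for_task(task)
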
[ 712.095823] env[68194]: DEBUG nova.compute.manager [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] [instance: fe79ae03-c408-4d18-914e-e64065998663] Received event network-changed-89bf8788-bb23-4791-af6a-bbc4a5506174 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 712.096099] env[68194]: DEBUG nova.compute.manager [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] [instance: fe79ae03-c408-4d18-914e-e64065998663] Refreshing instance network info cache due to event network-changed-89bf8788-bb23-4791-af6a-bbc4a5506174. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 712.096155] env[68194]: DEBUG oslo_concurrency.lockutils [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] Acquiring lock "refresh_cache-fe79ae03-c408-4d18-914e-e64065998663" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 712.096320] env[68194]: DEBUG oslo_concurrency.lockutils [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] Acquired lock "refresh_cache-fe79ae03-c408-4d18-914e-e64065998663" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 712.096425] env[68194]: DEBUG nova.network.neutron [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] [instance: fe79ae03-c408-4d18-914e-e64065998663] Refreshing network info cache for port 89bf8788-bb23-4791-af6a-bbc4a5506174 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 712.298221] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466790, 'name': CreateVM_Task, 'duration_secs': 0.340516} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 712.298406] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: fe79ae03-c408-4d18-914e-e64065998663] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 712.299079] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 712.299252] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 712.299841] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 712.300300] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a9d28890-b8bf-4767-9db0-c02eb33c544b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 712.305542] env[68194]: DEBUG oslo_vmware.api [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Waiting for the task: (returnval){ [ 712.305542] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52360fa6-a2f8-aae1-113e-c0f28db4525e" [ 712.305542] env[68194]: _type = "Task" [ 712.305542] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 712.317840] env[68194]: DEBUG oslo_vmware.api [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52360fa6-a2f8-aae1-113e-c0f28db4525e, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 712.357027] env[68194]: DEBUG nova.network.neutron [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] [instance: fe79ae03-c408-4d18-914e-e64065998663] Updated VIF entry in instance network info cache for port 89bf8788-bb23-4791-af6a-bbc4a5506174. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 712.357027] env[68194]: DEBUG nova.network.neutron [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] [instance: fe79ae03-c408-4d18-914e-e64065998663] Updating instance_info_cache with network_info: [{"id": "89bf8788-bb23-4791-af6a-bbc4a5506174", "address": "fa:16:3e:85:49:15", "network": {"id": "35d48270-2ece-40e3-8f54-fb270f395681", "bridge": "br-int", "label": "tempest-ServersNegativeTestJSON-1311683017-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "20264dcede3f41f1be3c200575c72cfb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ee20e439-fed9-490e-97dd-f3c886977ae1", "external-id": "nsx-vlan-transportzone-357", "segmentation_id": 357, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap89bf8788-bb", "ovs_interfaceid": "89bf8788-bb23-4791-af6a-bbc4a5506174", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 712.366595] env[68194]: DEBUG oslo_concurrency.lockutils [req-93e1a7cb-d4e9-4b43-98e5-bca10c2d58fa req-4ba02250-1e98-451a-ac8e-4d9e388a5fa8 service nova] Releasing lock "refresh_cache-fe79ae03-c408-4d18-914e-e64065998663" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 712.824056] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 712.824261] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 712.824595] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 728.717511] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance"
{{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 728.717820] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 750.217222] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.411966] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.415626] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.415801] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 751.415926] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 751.440280] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.440361] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.440481] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.440614] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.442398] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Skipping network cache update for instance because it is Building.
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.442398] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.442398] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.442398] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.442398] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.442723] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: fe79ae03-c408-4d18-914e-e64065998663] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 751.442723] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 751.442723] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.442723] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.442723] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.442723] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 751.442959] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 752.416076] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 753.416640] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 753.428115] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 753.428365] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 753.428561] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 753.428747] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 753.429893] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-025a0ffe-710a-4e27-a9ef-8ce98e53f7d1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.438756] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-008f03d4-c184-46b4-8df4-595984893c5e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.452781] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3374647-d354-4c6e-8fdc-0f34fe4b9c58 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.459499] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abc596f5-3fb8-4aed-91c5-6e66f76d8840 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 753.489536] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180913MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 753.489687] env[68194]: DEBUG 
oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 753.489930] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 753.564899] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.565126] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a390ced6-9e41-46f3-a330-72d745aeab91 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.565264] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance dcdac63e-b5bb-4e53-ad3f-956a8b928e2c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.565391] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.565514] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.565633] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 10df4090-9ec0-4876-8925-23e585344a3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.565748] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4d692986-413f-4c9b-b5cc-de43d2ca498d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.565863] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.565975] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e575e1c7-7f35-41de-96e7-0771a4137bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.566105] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fe79ae03-c408-4d18-914e-e64065998663 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 753.577715] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.587937] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.600330] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 6b13d579-06d7-4bd4-a632-0cd978074902 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.610721] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 0156b780-3e46-4283-829c-9439698f3c8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.620643] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a5d1581e-4152-47fd-801b-e88f94dd0546 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.630014] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 0db07fdf-30c1-4367-999d-d9e8e9763b45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.639523] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 0702f4fa-2d01-4be3-abe8-faa32566d65d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.650751] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance c0708080-3a59-4def-b90b-1c5959d317fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.665952] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f1235b58-9673-4d54-ad1d-c48d4ff584e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.675467] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.684599] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d9b69c9f-9ad6-4605-a7b5-54eed2035cc0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.694798] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e2e34a7a-d419-4fc8-ae82-ff5874aa23d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.707920] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance afe91255-b954-4518-b0ca-a1f4ddcfd9ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.717564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cfa6ea83-c10f-4c87-860a-b26fb80e5f12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.727765] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance edfef42a-a968-4c36-92a4-e608037eab3f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.737491] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcaa671a-38d8-45a5-b772-7d856594f700 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.747751] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance faf174e0-4af8-4f58-be5d-2f8915b0d58b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.758550] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a6920ad4-bf1c-4daa-9b9a-81e782c88a20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.769300] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 753.769534] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 753.769684] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 754.070495] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-719bb089-986a-4aef-988c-69ab70b6141b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.078091] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2707c35-119e-459f-999b-30dbbf4efb24 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.107714] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af95bf06-5664-4ae9-a721-98b1e0625ef7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.115287] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bed27a2-a008-4e7a-9891-09b6772d82ec {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.128079] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 754.139448] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 754.152848] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 754.153071] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.663s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 754.422041] env[68194]: WARNING oslo_vmware.rw_handles [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 754.422041] env[68194]: ERROR oslo_vmware.rw_handles [ 754.422041] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/10791afd-9bc4-4c93-9c1f-77ed2e69f55d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 754.423459] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 754.423797] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Copying Virtual Disk [datastore1] vmware_temp/10791afd-9bc4-4c93-9c1f-77ed2e69f55d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/10791afd-9bc4-4c93-9c1f-77ed2e69f55d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 754.424166] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-76f888c7-0e82-41e2-ab73-f14a3414b6b4 {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.434440] env[68194]: DEBUG oslo_vmware.api [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Waiting for the task: (returnval){ [ 754.434440] env[68194]: value = "task-3466791" [ 754.434440] env[68194]: _type = "Task" [ 754.434440] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.442601] env[68194]: DEBUG oslo_vmware.api [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Task: {'id': task-3466791, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.945303] env[68194]: DEBUG oslo_vmware.exceptions [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 754.945581] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 754.946150] env[68194]: ERROR nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 754.946150] env[68194]: Faults: ['InvalidArgument'] [ 754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Traceback (most recent call last): [ 754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] yield resources [ 754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] self.driver.spawn(context, instance, image_meta, [ 754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] self._fetch_image_if_missing(context, vi) [ 
754.946150] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] image_cache(vi, tmp_image_ds_loc) [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] vm_util.copy_virtual_disk( [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] session._wait_for_task(vmdk_copy_task) [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] return self.wait_for_task(task_ref) [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] return evt.wait() [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] result = hub.switch() [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 754.946481] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] return self.greenlet.switch() [ 754.946921] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 754.946921] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] self.f(*self.args, **self.kw) [ 754.946921] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 754.946921] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] raise exceptions.translate_fault(task_info.error) [ 754.946921] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 754.946921] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Faults: ['InvalidArgument'] [ 754.946921] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] [ 754.946921] env[68194]: INFO nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 
tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Terminating instance [ 754.948237] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 754.948533] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 754.949068] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquiring lock "refresh_cache-a390ced6-9e41-46f3-a330-72d745aeab91" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 754.949235] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquired lock "refresh_cache-a390ced6-9e41-46f3-a330-72d745aeab91" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 754.949401] env[68194]: DEBUG nova.network.neutron [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 754.950285] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bd49c44d-d6fa-4f93-b164-516fe8ad9cb6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.960429] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 754.960602] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 754.961601] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-728fecb1-25c5-46cf-97f5-4f566c959cbb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 754.966760] env[68194]: DEBUG oslo_vmware.api [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Waiting for the task: (returnval){ [ 754.966760] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52bbb93f-53c2-14cd-ad1b-36993a5522a4" [ 754.966760] env[68194]: _type = "Task" [ 754.966760] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 754.974467] env[68194]: DEBUG oslo_vmware.api [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52bbb93f-53c2-14cd-ad1b-36993a5522a4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 754.976956] env[68194]: DEBUG nova.network.neutron [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 755.061325] env[68194]: DEBUG nova.network.neutron [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 755.069857] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Releasing lock "refresh_cache-a390ced6-9e41-46f3-a330-72d745aeab91" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 755.070315] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 755.070586] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 755.071631] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02d75fc7-cef8-4aa5-b81e-f9f01b1315da {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.079932] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 755.080204] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-27bc2614-dab3-471a-87d1-6757aa8305c6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.115900] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 755.116141] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 755.116326] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Deleting the datastore file [datastore1] a390ced6-9e41-46f3-a330-72d745aeab91 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 755.116575] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-f253a5a9-2aa9-4682-94f7-08b7423e7ff7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.122022] env[68194]: DEBUG oslo_vmware.api [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Waiting for the task: (returnval){ [ 755.122022] env[68194]: value = "task-3466793" [ 755.122022] env[68194]: _type = "Task" [ 755.122022] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 755.129769] env[68194]: DEBUG oslo_vmware.api [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Task: {'id': task-3466793, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 755.477096] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 755.477424] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Creating directory with path [datastore1] vmware_temp/ccd176fd-5783-4c85-9259-87ca293181e7/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 755.477574] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e93189b9-cd16-481a-b508-bf84f5119464 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.488956] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Created directory with path [datastore1] vmware_temp/ccd176fd-5783-4c85-9259-87ca293181e7/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 755.489167] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Fetch image to [datastore1] vmware_temp/ccd176fd-5783-4c85-9259-87ca293181e7/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 755.489338] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/ccd176fd-5783-4c85-9259-87ca293181e7/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 755.490061] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ace66cbc-efd3-47d7-ab66-121b10938f70 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.496856] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ea3c534-2b32-493e-b0de-1a18cc23ffe7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.505698] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8635402a-4658-4fb4-b5a2-31781b16bc56 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.535643] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b576f9ea-f4b5-44cf-b999-1564485de5e7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.541097] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-f0e8ddc8-fe4e-496b-8631-febf5474645f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 755.569695] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 755.620265] env[68194]: DEBUG oslo_vmware.rw_handles [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ccd176fd-5783-4c85-9259-87ca293181e7/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 755.679432] env[68194]: DEBUG oslo_vmware.rw_handles [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 755.679617] env[68194]: DEBUG oslo_vmware.rw_handles [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/ccd176fd-5783-4c85-9259-87ca293181e7/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 755.683817] env[68194]: DEBUG oslo_vmware.api [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Task: {'id': task-3466793, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031862} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 755.684073] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 755.684254] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 755.684423] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 755.684610] env[68194]: INFO nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Took 0.61 seconds to destroy the instance on the hypervisor. [ 755.684847] env[68194]: DEBUG oslo.service.loopingcall [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 755.685056] env[68194]: DEBUG nova.compute.manager [-] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 755.687253] env[68194]: DEBUG nova.compute.claims [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 755.687405] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 755.687613] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 756.066821] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5705e132-2fae-4519-9580-9f0200c4a16d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.076392] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7bb4e9f5-a0f3-4543-a40d-c982d057f042 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.106045] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-56c3d784-6b0e-43d3-8925-8358016bf38d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.112988] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49564cc3-c3c4-498f-95bb-74b56b60b2f0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.125892] env[68194]: DEBUG nova.compute.provider_tree [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.137577] env[68194]: DEBUG nova.scheduler.client.report [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 756.150877] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.463s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 756.151417] env[68194]: ERROR nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 756.151417] env[68194]: Faults: ['InvalidArgument'] [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Traceback (most recent call last): [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] self.driver.spawn(context, instance, image_meta, [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] self._vmops.spawn(context, instance, image_meta, injected_files, [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] self._fetch_image_if_missing(context, vi) [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] image_cache(vi, tmp_image_ds_loc) [ 756.151417] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] vm_util.copy_virtual_disk( [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] session._wait_for_task(vmdk_copy_task) [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] return self.wait_for_task(task_ref) [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] return evt.wait() [ 
756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] result = hub.switch() [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] return self.greenlet.switch() [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 756.151732] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] self.f(*self.args, **self.kw) [ 756.152076] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 756.152076] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] raise exceptions.translate_fault(task_info.error) [ 756.152076] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 756.152076] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Faults: ['InvalidArgument'] [ 756.152076] env[68194]: ERROR nova.compute.manager [instance: a390ced6-9e41-46f3-a330-72d745aeab91] [ 756.152214] env[68194]: DEBUG nova.compute.utils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 756.153814] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Build of instance a390ced6-9e41-46f3-a330-72d745aeab91 was re-scheduled: A specified parameter was not correct: fileType [ 756.153814] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 756.154205] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 756.154426] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Acquiring lock "refresh_cache-a390ced6-9e41-46f3-a330-72d745aeab91" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 756.154576] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 
tempest-ServersAdmin275Test-1814564948-project-member] Acquired lock "refresh_cache-a390ced6-9e41-46f3-a330-72d745aeab91" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 756.154735] env[68194]: DEBUG nova.network.neutron [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 756.181528] env[68194]: DEBUG nova.network.neutron [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 756.243032] env[68194]: DEBUG nova.network.neutron [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 756.252664] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Releasing lock "refresh_cache-a390ced6-9e41-46f3-a330-72d745aeab91" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 756.252664] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 756.252808] env[68194]: DEBUG nova.compute.manager [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] [instance: a390ced6-9e41-46f3-a330-72d745aeab91] Skipping network deallocation for instance since networking was not requested. {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 756.349548] env[68194]: INFO nova.scheduler.client.report [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Deleted allocations for instance a390ced6-9e41-46f3-a330-72d745aeab91 [ 756.373410] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ec09a6da-e4c0-4358-87a6-d126fd256474 tempest-ServersAdmin275Test-1814564948 tempest-ServersAdmin275Test-1814564948-project-member] Lock "a390ced6-9e41-46f3-a330-72d745aeab91" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 139.464s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 756.395635] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 756.461491] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 756.461749] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 756.463334] env[68194]: INFO nova.compute.claims [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 756.895449] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9da7d8d-ae9d-4533-bc00-c7957ac2c3b3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.905135] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6458c3f2-798a-4cff-9edb-c408d3cf204e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.936044] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb32801d-4e6a-48ae-8c4c-94e3f1e84b97 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.943717] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-004300a2-8490-4eea-a994-6da2a14e1129 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 756.959070] env[68194]: DEBUG nova.compute.provider_tree [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 756.968444] env[68194]: DEBUG nova.scheduler.client.report [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 756.983810] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.522s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 756.984384] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 757.021935] env[68194]: DEBUG nova.compute.utils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 757.023763] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 757.024177] env[68194]: DEBUG nova.network.neutron [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 757.035738] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 757.107141] env[68194]: DEBUG nova.policy [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'd50c0152cf4842bfafd4f7e1c22da96b', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a9bfff76cf2d48e09d0e5980b4b8df65', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 757.115796] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 757.145450] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 757.145844] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 757.146129] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 757.146443] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 757.146699] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 757.146955] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 757.147315] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 757.147585] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 757.147871] env[68194]: DEBUG nova.virt.hardware [None 
req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 757.148166] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 757.148575] env[68194]: DEBUG nova.virt.hardware [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 757.149490] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c7099f8b-1369-467b-aecb-06aae48e8adf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.160817] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e141f99-f57b-4d7e-8576-9893242a6168 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 757.466634] env[68194]: DEBUG nova.network.neutron [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Successfully created port: e497bc34-f903-41be-ad0f-82908aa0fee0 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 758.130248] env[68194]: DEBUG nova.network.neutron [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Successfully updated port: e497bc34-f903-41be-ad0f-82908aa0fee0 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 758.143282] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquiring lock "refresh_cache-cef5dc8e-1a5c-4248-9bac-ff25880588ed" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 758.143425] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquired lock "refresh_cache-cef5dc8e-1a5c-4248-9bac-ff25880588ed" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 758.143605] env[68194]: DEBUG nova.network.neutron [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 758.207982] env[68194]: DEBUG nova.network.neutron [None req-c25cfe32-faec-4905-a346-95dd1e2966da 
tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 758.307542] env[68194]: DEBUG nova.compute.manager [req-6878a9e0-61e9-4968-a6b4-dfe2bf0a0c14 req-256587a4-d5de-4d0b-8b87-4dcea1b3caab service nova] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Received event network-vif-plugged-e497bc34-f903-41be-ad0f-82908aa0fee0 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 758.307542] env[68194]: DEBUG oslo_concurrency.lockutils [req-6878a9e0-61e9-4968-a6b4-dfe2bf0a0c14 req-256587a4-d5de-4d0b-8b87-4dcea1b3caab service nova] Acquiring lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 758.307542] env[68194]: DEBUG oslo_concurrency.lockutils [req-6878a9e0-61e9-4968-a6b4-dfe2bf0a0c14 req-256587a4-d5de-4d0b-8b87-4dcea1b3caab service nova] Lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 758.307542] env[68194]: DEBUG oslo_concurrency.lockutils [req-6878a9e0-61e9-4968-a6b4-dfe2bf0a0c14 req-256587a4-d5de-4d0b-8b87-4dcea1b3caab service nova] Lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 758.307837] env[68194]: DEBUG nova.compute.manager [req-6878a9e0-61e9-4968-a6b4-dfe2bf0a0c14 req-256587a4-d5de-4d0b-8b87-4dcea1b3caab service nova] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] No waiting events found dispatching network-vif-plugged-e497bc34-f903-41be-ad0f-82908aa0fee0 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 758.307837] env[68194]: WARNING nova.compute.manager [req-6878a9e0-61e9-4968-a6b4-dfe2bf0a0c14 req-256587a4-d5de-4d0b-8b87-4dcea1b3caab service nova] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Received unexpected event network-vif-plugged-e497bc34-f903-41be-ad0f-82908aa0fee0 for instance with vm_state building and task_state spawning. 
[ 758.391623] env[68194]: DEBUG nova.network.neutron [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Updating instance_info_cache with network_info: [{"id": "e497bc34-f903-41be-ad0f-82908aa0fee0", "address": "fa:16:3e:02:15:b7", "network": {"id": "58102361-6459-4792-bc20-c4dc3ddcae8b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2100047065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9bfff76cf2d48e09d0e5980b4b8df65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape497bc34-f9", "ovs_interfaceid": "e497bc34-f903-41be-ad0f-82908aa0fee0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 758.406581] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Releasing lock "refresh_cache-cef5dc8e-1a5c-4248-9bac-ff25880588ed" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 758.406783] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Instance network_info: |[{"id": "e497bc34-f903-41be-ad0f-82908aa0fee0", "address": "fa:16:3e:02:15:b7", "network": {"id": "58102361-6459-4792-bc20-c4dc3ddcae8b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2100047065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9bfff76cf2d48e09d0e5980b4b8df65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape497bc34-f9", "ovs_interfaceid": "e497bc34-f903-41be-ad0f-82908aa0fee0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 758.407472] env[68194]: DEBUG nova.virt.vmwareapi.vmops 
[None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:02:15:b7', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '7b83383f-ed7a-4efd-aef7-aa8c15649d07', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'e497bc34-f903-41be-ad0f-82908aa0fee0', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 758.414703] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Creating folder: Project (a9bfff76cf2d48e09d0e5980b4b8df65). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 758.415283] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d8b3783e-84e4-45e7-9023-2747ab002ceb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.427337] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Created folder: Project (a9bfff76cf2d48e09d0e5980b4b8df65) in parent group-v692426. [ 758.427533] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Creating folder: Instances. Parent ref: group-v692471. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 758.427756] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1579b4b-4665-4926-b876-aaaf7243844e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.437285] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Created folder: Instances in parent group-v692471. [ 758.437542] env[68194]: DEBUG oslo.service.loopingcall [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 758.437732] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 758.437933] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4c38bc8d-4428-46ad-aba8-c8a08a577cb3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.456702] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 758.456702] env[68194]: value = "task-3466796" [ 758.456702] env[68194]: _type = "Task" [ 758.456702] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.464268] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466796, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 758.966819] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466796, 'name': CreateVM_Task, 'duration_secs': 0.319041} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 758.966996] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 758.967698] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 758.967891] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 758.968327] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 758.968549] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e6142e1b-c9c3-4522-b543-510d9ebfccc9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 758.973047] env[68194]: DEBUG oslo_vmware.api [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Waiting for the task: (returnval){ [ 758.973047] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5233ac6c-dec3-fe20-4465-2a9e1e6859ef" [ 758.973047] env[68194]: _type = "Task" [ 758.973047] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 758.980583] env[68194]: DEBUG oslo_vmware.api [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5233ac6c-dec3-fe20-4465-2a9e1e6859ef, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 759.483516] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 759.483789] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 759.483981] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 759.702697] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquiring lock "ff16d7c1-a601-4ac6-be52-823727c8b843" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 759.702697] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 760.381114] env[68194]: DEBUG nova.compute.manager [req-bf7bb824-370e-4229-b811-4036516062b6 req-f9cc5f9b-3ef2-42a9-8bc1-203320600ab4 service nova] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Received event network-changed-e497bc34-f903-41be-ad0f-82908aa0fee0 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 760.381186] env[68194]: DEBUG nova.compute.manager [req-bf7bb824-370e-4229-b811-4036516062b6 req-f9cc5f9b-3ef2-42a9-8bc1-203320600ab4 service nova] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Refreshing instance network info cache due to event network-changed-e497bc34-f903-41be-ad0f-82908aa0fee0. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 760.381380] env[68194]: DEBUG oslo_concurrency.lockutils [req-bf7bb824-370e-4229-b811-4036516062b6 req-f9cc5f9b-3ef2-42a9-8bc1-203320600ab4 service nova] Acquiring lock "refresh_cache-cef5dc8e-1a5c-4248-9bac-ff25880588ed" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 760.381594] env[68194]: DEBUG oslo_concurrency.lockutils [req-bf7bb824-370e-4229-b811-4036516062b6 req-f9cc5f9b-3ef2-42a9-8bc1-203320600ab4 service nova] Acquired lock "refresh_cache-cef5dc8e-1a5c-4248-9bac-ff25880588ed" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 760.381765] env[68194]: DEBUG nova.network.neutron [req-bf7bb824-370e-4229-b811-4036516062b6 req-f9cc5f9b-3ef2-42a9-8bc1-203320600ab4 service nova] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Refreshing network info cache for port e497bc34-f903-41be-ad0f-82908aa0fee0 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 760.800285] env[68194]: DEBUG nova.network.neutron [req-bf7bb824-370e-4229-b811-4036516062b6 req-f9cc5f9b-3ef2-42a9-8bc1-203320600ab4 service nova] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Updated VIF entry in instance network info cache for port e497bc34-f903-41be-ad0f-82908aa0fee0. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 760.800741] env[68194]: DEBUG nova.network.neutron [req-bf7bb824-370e-4229-b811-4036516062b6 req-f9cc5f9b-3ef2-42a9-8bc1-203320600ab4 service nova] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Updating instance_info_cache with network_info: [{"id": "e497bc34-f903-41be-ad0f-82908aa0fee0", "address": "fa:16:3e:02:15:b7", "network": {"id": "58102361-6459-4792-bc20-c4dc3ddcae8b", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherA-2100047065-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a9bfff76cf2d48e09d0e5980b4b8df65", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "7b83383f-ed7a-4efd-aef7-aa8c15649d07", "external-id": "nsx-vlan-transportzone-282", "segmentation_id": 282, "bound_drivers": {"0": "nsxv3"}}, "devname": "tape497bc34-f9", "ovs_interfaceid": "e497bc34-f903-41be-ad0f-82908aa0fee0", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 760.811083] env[68194]: DEBUG oslo_concurrency.lockutils [req-bf7bb824-370e-4229-b811-4036516062b6 req-f9cc5f9b-3ef2-42a9-8bc1-203320600ab4 service nova] Releasing lock "refresh_cache-cef5dc8e-1a5c-4248-9bac-ff25880588ed" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 803.278178] env[68194]: WARNING oslo_vmware.rw_handles [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Error 
occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 803.278178] env[68194]: ERROR oslo_vmware.rw_handles [ 803.278178] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/ccd176fd-5783-4c85-9259-87ca293181e7/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 803.278842] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 803.278842] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Copying Virtual Disk [datastore1] vmware_temp/ccd176fd-5783-4c85-9259-87ca293181e7/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/ccd176fd-5783-4c85-9259-87ca293181e7/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 803.278842] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8b7a98d4-2268-43db-a5e6-38cf6b7be17d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.288444] env[68194]: DEBUG oslo_vmware.api [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Waiting for the task: (returnval){ [ 803.288444] env[68194]: value = "task-3466797" [ 803.288444] env[68194]: _type = "Task" [ 803.288444] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.296791] env[68194]: DEBUG oslo_vmware.api [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Task: {'id': task-3466797, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.799408] env[68194]: DEBUG oslo_vmware.exceptions [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 803.800029] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 803.800515] env[68194]: ERROR nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 803.800515] env[68194]: Faults: ['InvalidArgument'] [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Traceback (most recent call last): [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] yield resources [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] self.driver.spawn(context, instance, image_meta, [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] self._fetch_image_if_missing(context, vi) [ 803.800515] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] image_cache(vi, tmp_image_ds_loc) [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: 
dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] vm_util.copy_virtual_disk( [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] session._wait_for_task(vmdk_copy_task) [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] return self.wait_for_task(task_ref) [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] return evt.wait() [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] result = hub.switch() [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 803.800840] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] return self.greenlet.switch() [ 803.801158] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 803.801158] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] self.f(*self.args, **self.kw) [ 803.801158] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 803.801158] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] raise exceptions.translate_fault(task_info.error) [ 803.801158] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 803.801158] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Faults: ['InvalidArgument'] [ 803.801158] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] [ 803.801158] env[68194]: INFO nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Terminating instance [ 803.802484] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 803.802769] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 803.802936] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e15a8717-ed93-465c-bf83-92e236b5c65f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.805019] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquiring lock "refresh_cache-dcdac63e-b5bb-4e53-ad3f-956a8b928e2c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 803.805190] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquired lock "refresh_cache-dcdac63e-b5bb-4e53-ad3f-956a8b928e2c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 803.805356] env[68194]: DEBUG nova.network.neutron [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 803.812874] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 803.813090] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 803.815512] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bfc9cc3c-6fc9-4208-95e6-f1f0852c773d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.823155] env[68194]: DEBUG oslo_vmware.api [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Waiting for the task: (returnval){ [ 803.823155] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]528709f3-861e-2f1f-210a-7edad74d66df" [ 803.823155] env[68194]: _type = "Task" [ 803.823155] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.830967] env[68194]: DEBUG oslo_vmware.api [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]528709f3-861e-2f1f-210a-7edad74d66df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 803.841020] env[68194]: DEBUG nova.network.neutron [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 803.901692] env[68194]: DEBUG nova.network.neutron [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 803.911085] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Releasing lock "refresh_cache-dcdac63e-b5bb-4e53-ad3f-956a8b928e2c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 803.911537] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 803.911731] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 803.912864] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8cf9e35e-11e0-44d7-92aa-3ecaa9fd20aa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.920730] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 803.923936] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-58854511-71ba-4254-8cce-8db0a5c6f2e8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.948421] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 803.948666] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 803.948819] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Deleting the datastore file [datastore1] dcdac63e-b5bb-4e53-ad3f-956a8b928e2c {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 803.949092] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-0b71d2b3-0a62-42e2-b677-b79efbe224fc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 803.954922] env[68194]: DEBUG oslo_vmware.api [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Waiting for the task: (returnval){ [ 803.954922] env[68194]: value = "task-3466799" [ 803.954922] env[68194]: _type = "Task" [ 803.954922] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 803.962376] env[68194]: DEBUG oslo_vmware.api [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Task: {'id': task-3466799, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 804.333239] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 804.333554] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Creating directory with path [datastore1] vmware_temp/9575dfac-79e6-498d-bddf-1a0862500d08/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 804.333727] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c025e4a5-7429-4cdb-83ea-e2b296ad08bd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.347158] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Created directory with path [datastore1] vmware_temp/9575dfac-79e6-498d-bddf-1a0862500d08/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 804.347389] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Fetch image to [datastore1] vmware_temp/9575dfac-79e6-498d-bddf-1a0862500d08/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 804.347584] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/9575dfac-79e6-498d-bddf-1a0862500d08/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 804.348315] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8343607-7bb4-4f06-952e-7340c168a77f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.355858] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-148e8f39-2b69-4b3f-80e9-0df3a5d2b659 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.364629] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12a2a127-11ac-48b7-aa64-250868e0f0f3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.394343] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3ae873eb-ca6d-4bbf-9fb4-acb2125d2718 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.399906] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-23ef5602-f9f6-4775-8bf4-94bea9ed970d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.430772] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 804.466429] env[68194]: DEBUG oslo_vmware.api [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Task: {'id': task-3466799, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.052532} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 804.466691] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 804.466873] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 804.467105] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 804.467388] env[68194]: INFO nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Took 0.56 seconds to destroy the instance on the hypervisor. [ 804.467642] env[68194]: DEBUG oslo.service.loopingcall [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 804.467843] env[68194]: DEBUG nova.compute.manager [-] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 804.470154] env[68194]: DEBUG nova.compute.claims [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 804.470326] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 804.470537] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 804.490253] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9575dfac-79e6-498d-bddf-1a0862500d08/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 804.555671] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 804.559131] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/9575dfac-79e6-498d-bddf-1a0862500d08/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 804.915770] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af6802c1-301b-46db-992f-f6a6de0c23df {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.923549] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4e474a8-870d-402c-b495-c3f299082b31 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.955251] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d7917ada-05e0-4774-b7f4-02eb50272f1d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.962761] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e0707b2-f346-4b64-8ae5-a7566fc7f81b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 804.976686] env[68194]: DEBUG nova.compute.provider_tree [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 804.986943] env[68194]: DEBUG nova.scheduler.client.report [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 805.005409] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.535s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 805.006035] env[68194]: ERROR nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 805.006035] env[68194]: Faults: ['InvalidArgument'] [ 805.006035] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Traceback (most recent call last): [ 805.006035] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 805.006035] env[68194]: 
ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] self.driver.spawn(context, instance, image_meta, [ 805.006035] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 805.006035] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 805.006035] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 805.006035] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] self._fetch_image_if_missing(context, vi) [ 805.006035] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 805.006035] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] image_cache(vi, tmp_image_ds_loc) [ 805.006035] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] vm_util.copy_virtual_disk( [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] session._wait_for_task(vmdk_copy_task) [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] return self.wait_for_task(task_ref) [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] return evt.wait() [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] result = hub.switch() [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] return self.greenlet.switch() [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 805.006345] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] self.f(*self.args, **self.kw) [ 805.006740] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 805.006740] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] raise exceptions.translate_fault(task_info.error) [ 805.006740] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 805.006740] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Faults: ['InvalidArgument'] [ 805.006740] env[68194]: ERROR nova.compute.manager [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] [ 805.006740] env[68194]: DEBUG nova.compute.utils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 805.008169] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Build of instance dcdac63e-b5bb-4e53-ad3f-956a8b928e2c was re-scheduled: A specified parameter was not correct: fileType [ 805.008169] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 805.008542] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 805.008953] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquiring lock "refresh_cache-dcdac63e-b5bb-4e53-ad3f-956a8b928e2c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 805.009146] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Acquired lock "refresh_cache-dcdac63e-b5bb-4e53-ad3f-956a8b928e2c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 805.009394] env[68194]: DEBUG nova.network.neutron [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 805.036097] env[68194]: DEBUG nova.network.neutron [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 805.098752] env[68194]: DEBUG nova.network.neutron [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 805.107403] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Releasing lock "refresh_cache-dcdac63e-b5bb-4e53-ad3f-956a8b928e2c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 805.107679] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 805.107878] env[68194]: DEBUG nova.compute.manager [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] [instance: dcdac63e-b5bb-4e53-ad3f-956a8b928e2c] Skipping network deallocation for instance since networking was not requested. {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 805.195075] env[68194]: INFO nova.scheduler.client.report [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Deleted allocations for instance dcdac63e-b5bb-4e53-ad3f-956a8b928e2c [ 805.215972] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63504448-c243-4ced-a638-6769de86532c tempest-ServerDiagnosticsV248Test-420133557 tempest-ServerDiagnosticsV248Test-420133557-project-member] Lock "dcdac63e-b5bb-4e53-ad3f-956a8b928e2c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 188.056s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 805.227153] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 805.278263] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 805.278509] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 805.280373] env[68194]: INFO nova.compute.claims [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 805.627578] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-090f972f-929b-4334-8f5e-0003928e7197 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.635255] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d41f095-9ff3-4f3b-9e72-5fe31272e715 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.664991] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cbff6c5-79d6-40fe-ac32-14c5d02625df {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.672357] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9e5a42b7-3228-4d93-afac-ad59d471eb1d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.685440] env[68194]: DEBUG nova.compute.provider_tree [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 805.695703] env[68194]: DEBUG nova.scheduler.client.report [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 805.708962] env[68194]: DEBUG oslo_concurrency.lockutils 
[None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.430s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 805.709511] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 805.743989] env[68194]: DEBUG nova.compute.utils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 805.745616] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 805.745791] env[68194]: DEBUG nova.network.neutron [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 805.755108] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 805.817292] env[68194]: DEBUG nova.policy [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '628bc4df46494159a5e5a4b71770f64f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7fe2744a0c14564ae1dea9f2653bc4a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 805.829885] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 805.856179] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 805.856478] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 805.856605] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 805.856791] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 805.856941] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 805.857098] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 805.857326] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 805.857487] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 805.857654] 
env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 805.857820] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 805.857991] env[68194]: DEBUG nova.virt.hardware [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 805.858869] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a7ed7b0a-f900-48bc-8452-2e8c83e89bc4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 805.866945] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d181b964-b43c-48c6-b9da-dc7ddc45119c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 806.125140] env[68194]: DEBUG nova.network.neutron [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Successfully created port: 81d12ccf-68ac-4d56-a0c0-4d082377a137 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 806.736628] env[68194]: DEBUG nova.network.neutron [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Successfully updated port: 81d12ccf-68ac-4d56-a0c0-4d082377a137 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 806.747516] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "refresh_cache-2243c245-bbb3-43b7-89a9-fb727d452885" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 806.747628] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired lock "refresh_cache-2243c245-bbb3-43b7-89a9-fb727d452885" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 806.747781] env[68194]: DEBUG nova.network.neutron [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 806.820567] env[68194]: DEBUG nova.network.neutron [None 
req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 806.949920] env[68194]: DEBUG nova.compute.manager [req-7e879f6a-d57f-495f-821e-bd8b255e68f5 req-4c73492b-823a-4fa5-85cd-9dc9b8350e2e service nova] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Received event network-vif-plugged-81d12ccf-68ac-4d56-a0c0-4d082377a137 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 806.949920] env[68194]: DEBUG oslo_concurrency.lockutils [req-7e879f6a-d57f-495f-821e-bd8b255e68f5 req-4c73492b-823a-4fa5-85cd-9dc9b8350e2e service nova] Acquiring lock "2243c245-bbb3-43b7-89a9-fb727d452885-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 806.950181] env[68194]: DEBUG oslo_concurrency.lockutils [req-7e879f6a-d57f-495f-821e-bd8b255e68f5 req-4c73492b-823a-4fa5-85cd-9dc9b8350e2e service nova] Lock "2243c245-bbb3-43b7-89a9-fb727d452885-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 806.950446] env[68194]: DEBUG oslo_concurrency.lockutils [req-7e879f6a-d57f-495f-821e-bd8b255e68f5 req-4c73492b-823a-4fa5-85cd-9dc9b8350e2e service nova] Lock "2243c245-bbb3-43b7-89a9-fb727d452885-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 806.950446] env[68194]: DEBUG nova.compute.manager [req-7e879f6a-d57f-495f-821e-bd8b255e68f5 req-4c73492b-823a-4fa5-85cd-9dc9b8350e2e service nova] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] No waiting events found dispatching network-vif-plugged-81d12ccf-68ac-4d56-a0c0-4d082377a137 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 806.950603] env[68194]: WARNING nova.compute.manager [req-7e879f6a-d57f-495f-821e-bd8b255e68f5 req-4c73492b-823a-4fa5-85cd-9dc9b8350e2e service nova] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Received unexpected event network-vif-plugged-81d12ccf-68ac-4d56-a0c0-4d082377a137 for instance with vm_state building and task_state spawning. 
[ 807.052969] env[68194]: DEBUG nova.network.neutron [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Updating instance_info_cache with network_info: [{"id": "81d12ccf-68ac-4d56-a0c0-4d082377a137", "address": "fa:16:3e:90:13:e0", "network": {"id": "0ba2ed75-b752-41fa-b27d-5a564f3d942e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2075431146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7fe2744a0c14564ae1dea9f2653bc4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81d12ccf-68", "ovs_interfaceid": "81d12ccf-68ac-4d56-a0c0-4d082377a137", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 807.069228] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Releasing lock "refresh_cache-2243c245-bbb3-43b7-89a9-fb727d452885" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 807.069540] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Instance network_info: |[{"id": "81d12ccf-68ac-4d56-a0c0-4d082377a137", "address": "fa:16:3e:90:13:e0", "network": {"id": "0ba2ed75-b752-41fa-b27d-5a564f3d942e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2075431146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7fe2744a0c14564ae1dea9f2653bc4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81d12ccf-68", "ovs_interfaceid": "81d12ccf-68ac-4d56-a0c0-4d082377a137", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 807.071022] env[68194]: DEBUG 
nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:90:13:e0', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47499d09-8010-4d02-ac96-4f057c104692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '81d12ccf-68ac-4d56-a0c0-4d082377a137', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 807.081048] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating folder: Project (c7fe2744a0c14564ae1dea9f2653bc4a). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 807.081627] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-fcc35a84-c4f8-4a20-92b8-3e356fa53097 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.093836] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Created folder: Project (c7fe2744a0c14564ae1dea9f2653bc4a) in parent group-v692426. [ 807.094204] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating folder: Instances. Parent ref: group-v692474. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 807.094301] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-41c3663e-0ce1-41e8-b8a8-1d39852100e0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.102338] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Created folder: Instances in parent group-v692474. [ 807.102762] env[68194]: DEBUG oslo.service.loopingcall [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 807.102762] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 807.102895] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0acba7dd-634b-4504-a251-ad82ad094ff3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.121072] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 807.121072] env[68194]: value = "task-3466802" [ 807.121072] env[68194]: _type = "Task" [ 807.121072] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.128022] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466802, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 807.631435] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466802, 'name': CreateVM_Task, 'duration_secs': 0.334125} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 807.631435] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 807.632022] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 807.632198] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 807.632514] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 807.632916] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fb2d63f9-e5dc-4226-8076-b25d1fe06dfe {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 807.637104] env[68194]: DEBUG oslo_vmware.api [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 807.637104] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]525d4b61-4b6d-30c2-c8ed-27a8bd6fa4ae" [ 807.637104] env[68194]: _type = "Task" [ 807.637104] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 807.644470] env[68194]: DEBUG oslo_vmware.api [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]525d4b61-4b6d-30c2-c8ed-27a8bd6fa4ae, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 808.147583] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 808.147882] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 808.147940] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 809.184101] env[68194]: DEBUG nova.compute.manager [req-42982ffa-75ad-4919-b4df-81c5f33b38d4 req-c0f28bf1-fc05-4227-9aeb-a048e7ac3870 service nova] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Received event network-changed-81d12ccf-68ac-4d56-a0c0-4d082377a137 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 809.184101] env[68194]: DEBUG nova.compute.manager [req-42982ffa-75ad-4919-b4df-81c5f33b38d4 req-c0f28bf1-fc05-4227-9aeb-a048e7ac3870 service nova] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Refreshing instance network info cache due to event network-changed-81d12ccf-68ac-4d56-a0c0-4d082377a137. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 809.184101] env[68194]: DEBUG oslo_concurrency.lockutils [req-42982ffa-75ad-4919-b4df-81c5f33b38d4 req-c0f28bf1-fc05-4227-9aeb-a048e7ac3870 service nova] Acquiring lock "refresh_cache-2243c245-bbb3-43b7-89a9-fb727d452885" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 809.184101] env[68194]: DEBUG oslo_concurrency.lockutils [req-42982ffa-75ad-4919-b4df-81c5f33b38d4 req-c0f28bf1-fc05-4227-9aeb-a048e7ac3870 service nova] Acquired lock "refresh_cache-2243c245-bbb3-43b7-89a9-fb727d452885" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 809.184563] env[68194]: DEBUG nova.network.neutron [req-42982ffa-75ad-4919-b4df-81c5f33b38d4 req-c0f28bf1-fc05-4227-9aeb-a048e7ac3870 service nova] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Refreshing network info cache for port 81d12ccf-68ac-4d56-a0c0-4d082377a137 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 809.468663] env[68194]: DEBUG nova.network.neutron [req-42982ffa-75ad-4919-b4df-81c5f33b38d4 req-c0f28bf1-fc05-4227-9aeb-a048e7ac3870 service nova] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Updated VIF entry in instance network info cache for port 81d12ccf-68ac-4d56-a0c0-4d082377a137. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 809.469014] env[68194]: DEBUG nova.network.neutron [req-42982ffa-75ad-4919-b4df-81c5f33b38d4 req-c0f28bf1-fc05-4227-9aeb-a048e7ac3870 service nova] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Updating instance_info_cache with network_info: [{"id": "81d12ccf-68ac-4d56-a0c0-4d082377a137", "address": "fa:16:3e:90:13:e0", "network": {"id": "0ba2ed75-b752-41fa-b27d-5a564f3d942e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2075431146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7fe2744a0c14564ae1dea9f2653bc4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap81d12ccf-68", "ovs_interfaceid": "81d12ccf-68ac-4d56-a0c0-4d082377a137", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 809.480860] env[68194]: DEBUG oslo_concurrency.lockutils [req-42982ffa-75ad-4919-b4df-81c5f33b38d4 req-c0f28bf1-fc05-4227-9aeb-a048e7ac3870 service nova] Releasing lock "refresh_cache-2243c245-bbb3-43b7-89a9-fb727d452885" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 811.148137] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.356907] env[68194]: DEBUG oslo_concurrency.lockutils [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 811.416506] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.416506] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.416506] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 811.416506] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 812.411532] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 812.417582] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 812.417582] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 812.417582] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 813.416307] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 813.416703] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 813.416703] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 813.452991] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.456445] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.456445] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.456445] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.456445] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.456724] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.456724] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.456962] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: fe79ae03-c408-4d18-914e-e64065998663] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.456962] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.457073] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 813.457190] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 814.416313] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 814.429152] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 814.429255] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 814.429598] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 814.429598] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 814.432950] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18862689-6d58-44b0-aa4c-f00818f5af33 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.439986] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8d540f3-a8f2-47fd-8a75-d24928bb04d8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.455652] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9858c602-d984-4e22-8789-e22bcd7c6d83 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.466863] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3c9bf00-6235-4c27-86c9-d10421e58f8c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 814.498250] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180949MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 814.498589] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 814.498661] 
env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 814.587559] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.587807] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.588013] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.588202] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 10df4090-9ec0-4876-8925-23e585344a3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.588378] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4d692986-413f-4c9b-b5cc-de43d2ca498d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.588561] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.588870] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e575e1c7-7f35-41de-96e7-0771a4137bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.589074] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fe79ae03-c408-4d18-914e-e64065998663 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.589256] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.589434] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 814.604507] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 6b13d579-06d7-4bd4-a632-0cd978074902 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.624064] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 0156b780-3e46-4283-829c-9439698f3c8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.637977] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a5d1581e-4152-47fd-801b-e88f94dd0546 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.648290] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 0db07fdf-30c1-4367-999d-d9e8e9763b45 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.660497] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 0702f4fa-2d01-4be3-abe8-faa32566d65d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.670962] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance c0708080-3a59-4def-b90b-1c5959d317fb has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.684329] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f1235b58-9673-4d54-ad1d-c48d4ff584e2 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.694838] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.707454] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d9b69c9f-9ad6-4605-a7b5-54eed2035cc0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.720216] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e2e34a7a-d419-4fc8-ae82-ff5874aa23d1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.735518] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance afe91255-b954-4518-b0ca-a1f4ddcfd9ba has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.747447] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cfa6ea83-c10f-4c87-860a-b26fb80e5f12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.759811] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance edfef42a-a968-4c36-92a4-e608037eab3f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.772814] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcaa671a-38d8-45a5-b772-7d856594f700 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.791218] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance faf174e0-4af8-4f58-be5d-2f8915b0d58b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.802753] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a6920ad4-bf1c-4daa-9b9a-81e782c88a20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.814861] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.826861] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 814.827139] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 814.827517] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 815.233342] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bff955fa-69af-4a9f-a38e-558a9fbd1d8a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.241201] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd4d9045-a5ac-4c8e-ab27-7af6e38b4ba0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.270347] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-449f62e8-8e8d-481a-a656-bc208788c3a8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.278465] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d88efeb-aef8-4325-818e-caf779f62313 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 815.293782] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 815.302589] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 815.323296] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 815.323500] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.825s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 815.640929] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquiring lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 815.641254] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 816.512145] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2c677f55-1024-4ed0-a2c4-7da6f53c3696 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquiring lock "1f155fbf-6460-4426-b6cf-176d44415eee" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 816.512542] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2c677f55-1024-4ed0-a2c4-7da6f53c3696 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "1f155fbf-6460-4426-b6cf-176d44415eee" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 817.668575] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquiring lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 821.476667] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquiring lock "10df4090-9ec0-4876-8925-23e585344a3b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 822.111752] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquiring lock "4d692986-413f-4c9b-b5cc-de43d2ca498d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 822.337382] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquiring lock "3d27a0be-599b-4bb4-89db-ff79d33047c8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) 
inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 827.912907] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquiring lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 834.988963] env[68194]: DEBUG oslo_concurrency.lockutils [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquiring lock "e575e1c7-7f35-41de-96e7-0771a4137bf5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 848.727141] env[68194]: DEBUG oslo_concurrency.lockutils [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquiring lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 849.142029] env[68194]: DEBUG oslo_concurrency.lockutils [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquiring lock "fe79ae03-c408-4d18-914e-e64065998663" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 850.211047] env[68194]: DEBUG oslo_concurrency.lockutils [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "2243c245-bbb3-43b7-89a9-fb727d452885" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 853.260141] env[68194]: WARNING oslo_vmware.rw_handles [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 
853.260141] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 853.260141] env[68194]: ERROR oslo_vmware.rw_handles [ 853.261294] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/9575dfac-79e6-498d-bddf-1a0862500d08/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 853.262634] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 853.262999] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Copying Virtual Disk [datastore1] vmware_temp/9575dfac-79e6-498d-bddf-1a0862500d08/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/9575dfac-79e6-498d-bddf-1a0862500d08/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 853.265865] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-889690c1-8496-4a59-8436-60e3d2908e23 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.273885] env[68194]: DEBUG oslo_vmware.api [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Waiting for the task: (returnval){ [ 853.273885] env[68194]: value = "task-3466803" [ 853.273885] env[68194]: _type = "Task" [ 853.273885] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.283042] env[68194]: DEBUG oslo_vmware.api [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Task: {'id': task-3466803, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.786551] env[68194]: DEBUG oslo_vmware.exceptions [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 853.787314] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 853.788381] env[68194]: ERROR nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 853.788381] env[68194]: Faults: ['InvalidArgument'] [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Traceback (most recent call last): [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] yield resources [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] self.driver.spawn(context, instance, image_meta, [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] self._fetch_image_if_missing(context, vi) [ 853.788381] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] image_cache(vi, tmp_image_ds_loc) [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] vm_util.copy_virtual_disk( [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] session._wait_for_task(vmdk_copy_task) [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] return self.wait_for_task(task_ref) [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] return evt.wait() [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] result = hub.switch() [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 853.789130] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] return self.greenlet.switch() [ 853.789759] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 853.789759] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] self.f(*self.args, **self.kw) [ 853.789759] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 853.789759] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] raise exceptions.translate_fault(task_info.error) [ 853.789759] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 853.789759] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Faults: ['InvalidArgument'] [ 853.789759] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] [ 853.789759] env[68194]: INFO nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Terminating instance [ 853.792216] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 853.792767] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 853.793891] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 
tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 853.794241] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 853.794682] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7d78fd4d-ba87-4257-af91-711cb8171924 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.798772] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24a972b8-74bc-47a1-81b8-e2737cd3c30f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.809648] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 853.809881] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2cf2d6f4-fb67-4d64-a2c7-f1e91e277669 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.813384] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 853.813890] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 853.815644] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fbfe9359-75c7-45e1-bb22-2947e6bb2b12 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.822846] env[68194]: DEBUG oslo_vmware.api [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Waiting for the task: (returnval){ [ 853.822846] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5298817d-16a0-9df5-4e19-6a401401b264" [ 853.822846] env[68194]: _type = "Task" [ 853.822846] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.831723] env[68194]: DEBUG oslo_vmware.api [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5298817d-16a0-9df5-4e19-6a401401b264, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 853.880527] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 853.880956] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 853.880956] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Deleting the datastore file [datastore1] 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 853.881257] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cf1a9013-a731-4582-96b7-9ee71b00f084 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 853.888560] env[68194]: DEBUG oslo_vmware.api [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Waiting for the task: (returnval){ [ 853.888560] env[68194]: value = "task-3466805" [ 853.888560] env[68194]: _type = "Task" [ 853.888560] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 853.896720] env[68194]: DEBUG oslo_vmware.api [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Task: {'id': task-3466805, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 854.333483] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 854.333795] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Creating directory with path [datastore1] vmware_temp/293966b5-4d84-4630-a131-7d703bacef75/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 854.334083] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7f178e5a-971c-4215-9fd8-c558d46ac191 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.350309] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Created directory with path [datastore1] vmware_temp/293966b5-4d84-4630-a131-7d703bacef75/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 854.350512] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Fetch image to [datastore1] vmware_temp/293966b5-4d84-4630-a131-7d703bacef75/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 854.350687] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/293966b5-4d84-4630-a131-7d703bacef75/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 854.351434] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d53d2f40-d56e-4c02-82a7-9b317b56b3f9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.358561] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5b426db-f74a-4569-99d9-614b186167ab {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.370472] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-09a0cce8-c895-4cfd-a1b0-40397802e236 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.407107] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-8208ab0d-4e3a-4fce-a3ca-bc4d72e2eabc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.416470] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c898884b-95c8-46ed-8650-214a7a33c957 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.418401] env[68194]: DEBUG oslo_vmware.api [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Task: {'id': task-3466805, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.09638} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 854.418529] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 854.418709] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 854.418873] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 854.419052] env[68194]: INFO nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Took 0.62 seconds to destroy the instance on the hypervisor. 
[ 854.424396] env[68194]: DEBUG nova.compute.claims [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 854.424396] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 854.424396] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 854.440059] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 854.503650] env[68194]: DEBUG oslo_vmware.rw_handles [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/293966b5-4d84-4630-a131-7d703bacef75/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 854.566748] env[68194]: DEBUG oslo_vmware.rw_handles [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 854.566748] env[68194]: DEBUG oslo_vmware.rw_handles [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/293966b5-4d84-4630-a131-7d703bacef75/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 854.944425] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b8d7501-feb0-4952-bec9-e8a100631b7c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.951931] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c762ed0f-eec3-4f7f-8b55-ada55e5b776c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.985682] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78dff22a-43f8-42ef-bc46-76b89c4522ac {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 854.992923] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0d1a8db-7a4d-4f1c-a795-dfc39d0eec98 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.005714] env[68194]: DEBUG nova.compute.provider_tree [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 855.014534] env[68194]: DEBUG nova.scheduler.client.report [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 855.036467] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.612s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 855.037161] env[68194]: ERROR nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 855.037161] env[68194]: Faults: ['InvalidArgument'] [ 855.037161] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Traceback (most recent call last): [ 855.037161] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 855.037161] 
env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] self.driver.spawn(context, instance, image_meta, [ 855.037161] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 855.037161] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 855.037161] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 855.037161] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] self._fetch_image_if_missing(context, vi) [ 855.037161] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 855.037161] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] image_cache(vi, tmp_image_ds_loc) [ 855.037161] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] vm_util.copy_virtual_disk( [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] session._wait_for_task(vmdk_copy_task) [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] return self.wait_for_task(task_ref) [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] return evt.wait() [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] result = hub.switch() [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] return self.greenlet.switch() [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 855.037520] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] self.f(*self.args, **self.kw) [ 855.037842] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 855.037842] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] raise exceptions.translate_fault(task_info.error) [ 855.037842] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 855.037842] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Faults: ['InvalidArgument'] [ 855.037842] env[68194]: ERROR nova.compute.manager [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] [ 855.038396] env[68194]: DEBUG nova.compute.utils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 855.040964] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Build of instance 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae was re-scheduled: A specified parameter was not correct: fileType [ 855.040964] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 855.041430] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 855.041660] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 855.041875] env[68194]: DEBUG nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 855.041979] env[68194]: DEBUG nova.network.neutron [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 855.700970] env[68194]: DEBUG nova.network.neutron [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 855.716514] env[68194]: INFO nova.compute.manager [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Took 0.67 seconds to deallocate network for instance. [ 855.899443] env[68194]: INFO nova.scheduler.client.report [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Deleted allocations for instance 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae [ 855.931257] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b3751e0-4643-4390-b7a6-d983a2252e56 tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 243.604s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 855.932657] env[68194]: DEBUG oslo_concurrency.lockutils [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 44.576s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 855.932754] env[68194]: DEBUG oslo_concurrency.lockutils [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 855.932966] env[68194]: DEBUG oslo_concurrency.lockutils [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 855.933215] env[68194]: DEBUG oslo_concurrency.lockutils [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 855.936099] env[68194]: INFO nova.compute.manager [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Terminating instance [ 855.938775] env[68194]: DEBUG nova.compute.manager [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 855.938775] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 855.938775] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c14aa7bb-ecc5-46c6-af0f-ecb01b9e3539 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.950285] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0bb1a504-e64d-41ea-b676-ccb93d8c8d29 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 855.963510] env[68194]: DEBUG nova.compute.manager [None req-f78a9e09-79bc-4abb-bb0b-0524746de721 tempest-InstanceActionsNegativeTestJSON-2041831807 tempest-InstanceActionsNegativeTestJSON-2041831807-project-member] [instance: 6b13d579-06d7-4bd4-a632-0cd978074902] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 855.987051] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae could not be found. 
[ 855.987051] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 855.987051] env[68194]: INFO nova.compute.manager [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Took 0.05 seconds to destroy the instance on the hypervisor. [ 855.987051] env[68194]: DEBUG oslo.service.loopingcall [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 855.987051] env[68194]: DEBUG nova.compute.manager [-] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 855.987805] env[68194]: DEBUG nova.network.neutron [-] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 856.008217] env[68194]: DEBUG nova.compute.manager [None req-f78a9e09-79bc-4abb-bb0b-0524746de721 tempest-InstanceActionsNegativeTestJSON-2041831807 tempest-InstanceActionsNegativeTestJSON-2041831807-project-member] [instance: 6b13d579-06d7-4bd4-a632-0cd978074902] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 856.025394] env[68194]: DEBUG nova.network.neutron [-] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 856.053815] env[68194]: INFO nova.compute.manager [-] [instance: 3c6604f4-4be4-45e5-9ff7-fe9ac96693ae] Took 0.07 seconds to deallocate network for instance. [ 856.055608] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f78a9e09-79bc-4abb-bb0b-0524746de721 tempest-InstanceActionsNegativeTestJSON-2041831807 tempest-InstanceActionsNegativeTestJSON-2041831807-project-member] Lock "6b13d579-06d7-4bd4-a632-0cd978074902" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.760s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 856.069384] env[68194]: DEBUG nova.compute.manager [None req-44baac25-cf7a-40cf-b494-99a8ba377dba tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: 0156b780-3e46-4283-829c-9439698f3c8a] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 856.096881] env[68194]: DEBUG nova.compute.manager [None req-44baac25-cf7a-40cf-b494-99a8ba377dba tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: 0156b780-3e46-4283-829c-9439698f3c8a] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 856.134663] env[68194]: DEBUG oslo_concurrency.lockutils [None req-44baac25-cf7a-40cf-b494-99a8ba377dba tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "0156b780-3e46-4283-829c-9439698f3c8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.077s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 856.151457] env[68194]: DEBUG nova.compute.manager [None req-74d3b6c7-4953-4721-a0de-6d1f80eb163e tempest-ServersTestFqdnHostnames-541581067 tempest-ServersTestFqdnHostnames-541581067-project-member] [instance: a5d1581e-4152-47fd-801b-e88f94dd0546] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 856.178722] env[68194]: DEBUG nova.compute.manager [None req-74d3b6c7-4953-4721-a0de-6d1f80eb163e tempest-ServersTestFqdnHostnames-541581067 tempest-ServersTestFqdnHostnames-541581067-project-member] [instance: a5d1581e-4152-47fd-801b-e88f94dd0546] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 856.215257] env[68194]: DEBUG oslo_concurrency.lockutils [None req-74d3b6c7-4953-4721-a0de-6d1f80eb163e tempest-ServersTestFqdnHostnames-541581067 tempest-ServersTestFqdnHostnames-541581067-project-member] Lock "a5d1581e-4152-47fd-801b-e88f94dd0546" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.372s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 856.222383] env[68194]: DEBUG oslo_concurrency.lockutils [None req-df61a305-d00b-4d60-a7e4-c17988559e8e tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "3c6604f4-4be4-45e5-9ff7-fe9ac96693ae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.290s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 856.225793] env[68194]: DEBUG nova.compute.manager [None req-89c3bd53-1942-43f5-b00e-c11c15c4aefc tempest-VolumesAssistedSnapshotsTest-515215701 tempest-VolumesAssistedSnapshotsTest-515215701-project-member] [instance: 0db07fdf-30c1-4367-999d-d9e8e9763b45] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 856.254173] env[68194]: DEBUG nova.compute.manager [None req-89c3bd53-1942-43f5-b00e-c11c15c4aefc tempest-VolumesAssistedSnapshotsTest-515215701 tempest-VolumesAssistedSnapshotsTest-515215701-project-member] [instance: 0db07fdf-30c1-4367-999d-d9e8e9763b45] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 856.280344] env[68194]: DEBUG oslo_concurrency.lockutils [None req-89c3bd53-1942-43f5-b00e-c11c15c4aefc tempest-VolumesAssistedSnapshotsTest-515215701 tempest-VolumesAssistedSnapshotsTest-515215701-project-member] Lock "0db07fdf-30c1-4367-999d-d9e8e9763b45" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 202.441s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 856.294530] env[68194]: DEBUG nova.compute.manager [None req-0a629d4c-cced-406e-8a43-74942619fdf7 tempest-ServersV294TestFqdnHostnames-1510252864 tempest-ServersV294TestFqdnHostnames-1510252864-project-member] [instance: 0702f4fa-2d01-4be3-abe8-faa32566d65d] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 856.324750] env[68194]: DEBUG nova.compute.manager [None req-0a629d4c-cced-406e-8a43-74942619fdf7 tempest-ServersV294TestFqdnHostnames-1510252864 tempest-ServersV294TestFqdnHostnames-1510252864-project-member] [instance: 0702f4fa-2d01-4be3-abe8-faa32566d65d] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 856.349115] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0a629d4c-cced-406e-8a43-74942619fdf7 tempest-ServersV294TestFqdnHostnames-1510252864 tempest-ServersV294TestFqdnHostnames-1510252864-project-member] Lock "0702f4fa-2d01-4be3-abe8-faa32566d65d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 201.998s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 856.362322] env[68194]: DEBUG nova.compute.manager [None req-3f489d01-d7f0-4d41-94be-810a4645412d tempest-ServersWithSpecificFlavorTestJSON-1169256422 tempest-ServersWithSpecificFlavorTestJSON-1169256422-project-member] [instance: c0708080-3a59-4def-b90b-1c5959d317fb] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 856.395012] env[68194]: DEBUG nova.compute.manager [None req-3f489d01-d7f0-4d41-94be-810a4645412d tempest-ServersWithSpecificFlavorTestJSON-1169256422 tempest-ServersWithSpecificFlavorTestJSON-1169256422-project-member] [instance: c0708080-3a59-4def-b90b-1c5959d317fb] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 856.432442] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3f489d01-d7f0-4d41-94be-810a4645412d tempest-ServersWithSpecificFlavorTestJSON-1169256422 tempest-ServersWithSpecificFlavorTestJSON-1169256422-project-member] Lock "c0708080-3a59-4def-b90b-1c5959d317fb" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.382s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 856.445795] env[68194]: DEBUG nova.compute.manager [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 856.513163] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 856.513163] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 856.513163] env[68194]: INFO nova.compute.claims [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 856.932724] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Acquiring lock "f1235b58-9673-4d54-ad1d-c48d4ff584e2" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 856.943083] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b83448d5-7432-49f4-ade6-f9e7cb070517 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.954432] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4d72e76b-6364-4a8b-aa22-d687a53e7a20 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.987531] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76a306ec-9e13-4607-8ab1-9096e4bbfb31 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 856.995552] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca000b93-ba4f-4807-ba9a-352bf44322ca {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.011216] env[68194]: DEBUG nova.compute.provider_tree [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.023346] env[68194]: DEBUG nova.scheduler.client.report [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Inventory has not changed for provider 
717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 857.037101] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.526s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 857.037581] env[68194]: DEBUG nova.compute.manager [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 857.084875] env[68194]: DEBUG nova.compute.claims [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 857.085304] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 857.085404] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 857.481243] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfa1394-fba6-4b68-9597-e6c72b20c550 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.497016] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9349bb21-d7c1-4078-b867-5aaf18f35a7d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.527872] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9281720-836a-4aa9-9378-b767416e6ec2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.535391] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-47a51ef4-708f-4fc3-817a-624e85fd4c8e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 857.551280] env[68194]: DEBUG nova.compute.provider_tree [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 857.564822] env[68194]: DEBUG nova.scheduler.client.report [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 857.584234] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.499s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 857.585022] env[68194]: DEBUG nova.compute.utils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Conflict updating instance f1235b58-9673-4d54-ad1d-c48d4ff584e2. Expected: {'task_state': [None]}. Actual: {'task_state': 'deleting'} {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 857.586780] env[68194]: DEBUG nova.compute.manager [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Instance disappeared during build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 857.586963] env[68194]: DEBUG nova.compute.manager [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 857.587965] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Acquiring lock "refresh_cache-f1235b58-9673-4d54-ad1d-c48d4ff584e2" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 857.587965] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Acquired lock "refresh_cache-f1235b58-9673-4d54-ad1d-c48d4ff584e2" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 857.587965] env[68194]: DEBUG nova.network.neutron [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 857.635120] env[68194]: DEBUG nova.network.neutron [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.869584] env[68194]: DEBUG nova.network.neutron [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.879352] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Releasing lock "refresh_cache-f1235b58-9673-4d54-ad1d-c48d4ff584e2" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 857.880280] env[68194]: DEBUG nova.compute.manager [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 857.880729] env[68194]: DEBUG nova.compute.manager [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 857.883020] env[68194]: DEBUG nova.network.neutron [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 857.906446] env[68194]: DEBUG nova.network.neutron [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 857.920511] env[68194]: DEBUG nova.network.neutron [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 857.971382] env[68194]: INFO nova.compute.manager [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Took 0.09 seconds to deallocate network for instance. 
[ 858.003371] env[68194]: DEBUG oslo_concurrency.lockutils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "8216e910-66b8-4147-a264-93e7eeefc7da" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 858.004315] env[68194]: DEBUG oslo_concurrency.lockutils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "8216e910-66b8-4147-a264-93e7eeefc7da" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 858.068063] env[68194]: INFO nova.scheduler.client.report [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Deleted allocations for instance f1235b58-9673-4d54-ad1d-c48d4ff584e2 [ 858.068063] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0afb1840-4f38-4322-b0e8-7cfae3e0da6f tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "f1235b58-9673-4d54-ad1d-c48d4ff584e2" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 199.113s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 858.068063] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "f1235b58-9673-4d54-ad1d-c48d4ff584e2" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 1.135s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 858.068063] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Acquiring lock "f1235b58-9673-4d54-ad1d-c48d4ff584e2-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 858.068312] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "f1235b58-9673-4d54-ad1d-c48d4ff584e2-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 858.068312] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "f1235b58-9673-4d54-ad1d-c48d4ff584e2-events" "released" by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 858.071609] env[68194]: INFO nova.compute.manager [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Terminating instance [ 858.075047] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Acquiring lock "refresh_cache-f1235b58-9673-4d54-ad1d-c48d4ff584e2" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 858.075382] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Acquired lock "refresh_cache-f1235b58-9673-4d54-ad1d-c48d4ff584e2" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 858.075776] env[68194]: DEBUG nova.network.neutron [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 858.087655] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 858.143639] env[68194]: DEBUG nova.network.neutron [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.150196] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 858.150588] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.002s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 858.152111] env[68194]: INFO nova.compute.claims [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 858.328373] env[68194]: DEBUG nova.network.neutron [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.342142] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Releasing lock "refresh_cache-f1235b58-9673-4d54-ad1d-c48d4ff584e2" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 858.346016] env[68194]: DEBUG nova.compute.manager [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 858.346016] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 858.346016] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-d0fb82ec-553a-4ec0-9baa-9c3827b54945 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.361761] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f69ca90-1e22-4a11-a03d-fa3f2214a0a7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.398336] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance f1235b58-9673-4d54-ad1d-c48d4ff584e2 could not be found. [ 858.398701] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 858.398895] env[68194]: INFO nova.compute.manager [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Took 0.06 seconds to destroy the instance on the hypervisor. [ 858.399162] env[68194]: DEBUG oslo.service.loopingcall [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 858.403268] env[68194]: DEBUG nova.compute.manager [-] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 858.403268] env[68194]: DEBUG nova.network.neutron [-] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 858.421811] env[68194]: DEBUG nova.network.neutron [-] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 858.431986] env[68194]: DEBUG nova.network.neutron [-] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 858.448211] env[68194]: INFO nova.compute.manager [-] [instance: f1235b58-9673-4d54-ad1d-c48d4ff584e2] Took 0.05 seconds to deallocate network for instance. [ 858.615876] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5605490b-4b2e-4afb-a72d-4f0c49a35f98 tempest-ImagesOneServerNegativeTestJSON-2135308344 tempest-ImagesOneServerNegativeTestJSON-2135308344-project-member] Lock "f1235b58-9673-4d54-ad1d-c48d4ff584e2" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.548s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 858.625542] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cd00263-dabc-42c2-900b-c59167077393 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.636601] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b554759-126f-4620-b225-51b02c013911 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.672240] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b99529b7-a07f-4fa2-a2fe-290bdf07b986 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.680138] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c1fada-bda4-4982-b053-f9699da9bc65 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.697535] env[68194]: DEBUG nova.compute.provider_tree [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 858.712598] env[68194]: DEBUG nova.scheduler.client.report [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 858.735566] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 
0.585s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 858.736488] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 858.776696] env[68194]: DEBUG nova.compute.utils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 858.776971] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 858.777288] env[68194]: DEBUG nova.network.neutron [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 858.791901] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 858.891245] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 858.917667] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 858.917910] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 858.918384] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 858.919406] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 858.919749] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 858.919749] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 858.919927] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 858.920176] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies 
/opt/stack/nova/nova/virt/hardware.py:471}} [ 858.920328] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 858.920527] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 858.920660] env[68194]: DEBUG nova.virt.hardware [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 858.921548] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d18e20e-ceff-425a-96a1-05790cff0638 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.931934] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b99c3a2-76ba-40fb-aeea-920665e48a8a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 858.950176] env[68194]: DEBUG nova.policy [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '8cfb8a7d26664cb6b3caf264a216ec67', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'afbe60d70695482ba67fbe57ded2ee19', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 859.621386] env[68194]: DEBUG oslo_concurrency.lockutils [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquiring lock "108001a3-ff36-475b-a7a5-8e0e197c62a8" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 859.829558] env[68194]: DEBUG nova.network.neutron [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Successfully created port: 6512e736-8269-4cee-b2c7-43add091486d {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 860.930329] env[68194]: DEBUG nova.network.neutron [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Successfully updated port: 
6512e736-8269-4cee-b2c7-43add091486d {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 860.944082] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquiring lock "refresh_cache-108001a3-ff36-475b-a7a5-8e0e197c62a8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 860.944288] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquired lock "refresh_cache-108001a3-ff36-475b-a7a5-8e0e197c62a8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 860.944406] env[68194]: DEBUG nova.network.neutron [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 861.013131] env[68194]: DEBUG nova.network.neutron [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 861.656524] env[68194]: DEBUG nova.network.neutron [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Updating instance_info_cache with network_info: [{"id": "6512e736-8269-4cee-b2c7-43add091486d", "address": "fa:16:3e:42:f1:77", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6512e736-82", "ovs_interfaceid": "6512e736-8269-4cee-b2c7-43add091486d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 861.671587] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Releasing lock "refresh_cache-108001a3-ff36-475b-a7a5-8e0e197c62a8" {{(pid=68194) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 861.671904] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Instance network_info: |[{"id": "6512e736-8269-4cee-b2c7-43add091486d", "address": "fa:16:3e:42:f1:77", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6512e736-82", "ovs_interfaceid": "6512e736-8269-4cee-b2c7-43add091486d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 861.672327] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:42:f1:77', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6512e736-8269-4cee-b2c7-43add091486d', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 861.683671] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Creating folder: Project (afbe60d70695482ba67fbe57ded2ee19). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 861.684305] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7e676e94-7a0d-4318-bedb-a156821c7e4a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.696026] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Created folder: Project (afbe60d70695482ba67fbe57ded2ee19) in parent group-v692426. [ 861.696229] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Creating folder: Instances. Parent ref: group-v692477. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 861.696465] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8687e24d-4ef4-4729-a90d-2ed512f5bdc3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.705602] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Created folder: Instances in parent group-v692477. [ 861.705837] env[68194]: DEBUG oslo.service.loopingcall [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 861.706820] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 861.706820] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ee6190c1-db56-44d6-b0e3-f38efcb13ff0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 861.726695] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 861.726695] env[68194]: value = "task-3466808" [ 861.726695] env[68194]: _type = "Task" [ 861.726695] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 861.736236] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466808, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.184938] env[68194]: DEBUG nova.compute.manager [req-f9091fc0-ee64-48be-96f5-f459e66b79b7 req-429fb454-f636-490a-afc9-ae081e42a62d service nova] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Received event network-vif-plugged-6512e736-8269-4cee-b2c7-43add091486d {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 862.185311] env[68194]: DEBUG oslo_concurrency.lockutils [req-f9091fc0-ee64-48be-96f5-f459e66b79b7 req-429fb454-f636-490a-afc9-ae081e42a62d service nova] Acquiring lock "108001a3-ff36-475b-a7a5-8e0e197c62a8-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 862.185375] env[68194]: DEBUG oslo_concurrency.lockutils [req-f9091fc0-ee64-48be-96f5-f459e66b79b7 req-429fb454-f636-490a-afc9-ae081e42a62d service nova] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 862.185552] env[68194]: DEBUG oslo_concurrency.lockutils [req-f9091fc0-ee64-48be-96f5-f459e66b79b7 req-429fb454-f636-490a-afc9-ae081e42a62d service nova] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 862.185708] env[68194]: DEBUG nova.compute.manager [req-f9091fc0-ee64-48be-96f5-f459e66b79b7 req-429fb454-f636-490a-afc9-ae081e42a62d service nova] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] No waiting events found dispatching network-vif-plugged-6512e736-8269-4cee-b2c7-43add091486d {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 862.185871] env[68194]: WARNING nova.compute.manager [req-f9091fc0-ee64-48be-96f5-f459e66b79b7 req-429fb454-f636-490a-afc9-ae081e42a62d service nova] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Received unexpected event network-vif-plugged-6512e736-8269-4cee-b2c7-43add091486d for instance with vm_state building and task_state deleting. [ 862.236956] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466808, 'name': CreateVM_Task, 'duration_secs': 0.326564} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 862.237061] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 862.237673] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 862.237845] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 862.238187] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 862.238432] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-bc14b244-6d28-4401-88c9-a06c7b165d1c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 862.243392] env[68194]: DEBUG oslo_vmware.api [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Waiting for the task: (returnval){ [ 862.243392] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]526ac217-1228-0aa9-8681-12da1a1751da" [ 862.243392] env[68194]: _type = "Task" [ 862.243392] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 862.252031] env[68194]: DEBUG oslo_vmware.api [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]526ac217-1228-0aa9-8681-12da1a1751da, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 862.755748] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 862.756184] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 862.756539] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 865.783839] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquiring lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 865.784218] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 866.164741] env[68194]: DEBUG nova.compute.manager [req-54652871-7ee4-45c2-96cd-f00afbf2cda2 req-4d61c7f8-1dbe-4354-a6e8-054eb9f4dc00 service nova] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Received event network-changed-6512e736-8269-4cee-b2c7-43add091486d {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 866.167597] env[68194]: DEBUG nova.compute.manager [req-54652871-7ee4-45c2-96cd-f00afbf2cda2 req-4d61c7f8-1dbe-4354-a6e8-054eb9f4dc00 service nova] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Refreshing instance network info cache due to event network-changed-6512e736-8269-4cee-b2c7-43add091486d. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 866.167597] env[68194]: DEBUG oslo_concurrency.lockutils [req-54652871-7ee4-45c2-96cd-f00afbf2cda2 req-4d61c7f8-1dbe-4354-a6e8-054eb9f4dc00 service nova] Acquiring lock "refresh_cache-108001a3-ff36-475b-a7a5-8e0e197c62a8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 866.167597] env[68194]: DEBUG oslo_concurrency.lockutils [req-54652871-7ee4-45c2-96cd-f00afbf2cda2 req-4d61c7f8-1dbe-4354-a6e8-054eb9f4dc00 service nova] Acquired lock "refresh_cache-108001a3-ff36-475b-a7a5-8e0e197c62a8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 866.167597] env[68194]: DEBUG nova.network.neutron [req-54652871-7ee4-45c2-96cd-f00afbf2cda2 req-4d61c7f8-1dbe-4354-a6e8-054eb9f4dc00 service nova] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Refreshing network info cache for port 6512e736-8269-4cee-b2c7-43add091486d {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 866.853151] env[68194]: DEBUG nova.network.neutron [req-54652871-7ee4-45c2-96cd-f00afbf2cda2 req-4d61c7f8-1dbe-4354-a6e8-054eb9f4dc00 service nova] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Updated VIF entry in instance network info cache for port 6512e736-8269-4cee-b2c7-43add091486d. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 866.853518] env[68194]: DEBUG nova.network.neutron [req-54652871-7ee4-45c2-96cd-f00afbf2cda2 req-4d61c7f8-1dbe-4354-a6e8-054eb9f4dc00 service nova] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Updating instance_info_cache with network_info: [{"id": "6512e736-8269-4cee-b2c7-43add091486d", "address": "fa:16:3e:42:f1:77", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6512e736-82", "ovs_interfaceid": "6512e736-8269-4cee-b2c7-43add091486d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 866.865174] env[68194]: DEBUG oslo_concurrency.lockutils [req-54652871-7ee4-45c2-96cd-f00afbf2cda2 req-4d61c7f8-1dbe-4354-a6e8-054eb9f4dc00 service nova] Releasing lock "refresh_cache-108001a3-ff36-475b-a7a5-8e0e197c62a8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 867.595239] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c89a6692-ac2b-41e8-b3da-4b7f140ee944 tempest-InstanceActionsTestJSON-822727360 tempest-InstanceActionsTestJSON-822727360-project-member] Acquiring lock "e9321912-2500-406b-b504-7668258a0c00" 
by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 867.595239] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c89a6692-ac2b-41e8-b3da-4b7f140ee944 tempest-InstanceActionsTestJSON-822727360 tempest-InstanceActionsTestJSON-822727360-project-member] Lock "e9321912-2500-406b-b504-7668258a0c00" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 867.632230] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2487242f-95ba-4be7-b661-03acd89e1c3a tempest-ServersTestManualDisk-529272396 tempest-ServersTestManualDisk-529272396-project-member] Acquiring lock "8b885df7-e241-452a-bcaa-861b491a6ee0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 867.632396] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2487242f-95ba-4be7-b661-03acd89e1c3a tempest-ServersTestManualDisk-529272396 tempest-ServersTestManualDisk-529272396-project-member] Lock "8b885df7-e241-452a-bcaa-861b491a6ee0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 869.416931] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.417179] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 869.434144] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] There are 0 instances to clean {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 869.434831] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 869.435053] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances with incomplete migration {{(pid=68194) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 869.448222] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 871.467599] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.416033] 
env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.416033] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.416033] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 872.416202] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 873.413284] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 873.415769] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.415699] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.415968] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 874.429193] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 874.429414] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 874.429580] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 874.429737] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: 
domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 874.430834] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3e3483dd-b22c-44a9-bf62-66ae217b1504 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.440111] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-35705a66-72a0-4d2a-9f38-956210bc0cf0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.454752] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d62c7239-2a65-4e4c-928c-8e95e4db8444 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.461525] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eafe0484-c625-4736-8c4c-cc1851ecb921 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 874.492364] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180971MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 874.492364] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 874.492497] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 874.652290] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.652463] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.655530] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 10df4090-9ec0-4876-8925-23e585344a3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.655530] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4d692986-413f-4c9b-b5cc-de43d2ca498d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.655530] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.655530] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e575e1c7-7f35-41de-96e7-0771a4137bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.655856] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fe79ae03-c408-4d18-914e-e64065998663 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.655856] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.655856] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.655856] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 874.667463] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cfa6ea83-c10f-4c87-860a-b26fb80e5f12 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.679687] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance edfef42a-a968-4c36-92a4-e608037eab3f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.694927] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcaa671a-38d8-45a5-b772-7d856594f700 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.705591] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance faf174e0-4af8-4f58-be5d-2f8915b0d58b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.717571] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance a6920ad4-bf1c-4daa-9b9a-81e782c88a20 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.731751] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.745392] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.764052] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.775698] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1f155fbf-6460-4426-b6cf-176d44415eee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.794028] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8216e910-66b8-4147-a264-93e7eeefc7da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.807659] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.821444] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e9321912-2500-406b-b504-7668258a0c00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.836814] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8b885df7-e241-452a-bcaa-861b491a6ee0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 874.837267] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 874.837607] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 874.864348] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing inventories for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 874.882409] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating ProviderTree inventory for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 874.883675] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating inventory in ProviderTree for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 874.897468] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing aggregate associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, aggregates: None {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 874.919128] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing trait associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 875.321252] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-989b87c9-6f1b-4f82-859f-74683406aa53 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.329309] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-29bf84e9-47db-4884-a33b-8143643400a8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.362917] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ac1f1e6-8659-4aa8-abe4-6a27fb68d03d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.370431] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bbdb8423-e1e0-46a1-a010-9e8647c66d68 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 875.384179] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 875.393163] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 875.416809] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 875.417132] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.924s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 876.069533] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c4efa6ad-e1ec-4341-bf44-7023d1b4c497 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] Acquiring lock "337db74c-cea1-4760-a06e-c33cfb4d1de9" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 876.069533] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c4efa6ad-e1ec-4341-bf44-7023d1b4c497 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] Lock "337db74c-cea1-4760-a06e-c33cfb4d1de9" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 876.418149] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 876.418582] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 876.418582] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 876.442608] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.442608] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.442608] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.442608] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.442608] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.445204] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.445204] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: fe79ae03-c408-4d18-914e-e64065998663] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.445204] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.445204] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.445204] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 876.445723] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 878.259277] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1792064f-ebc9-40bd-8412-94c4ca6e15f2 tempest-ServerMetadataTestJSON-21535063 tempest-ServerMetadataTestJSON-21535063-project-member] Acquiring lock "fd693e84-5f26-4382-af13-d703dbbee894" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 878.259617] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1792064f-ebc9-40bd-8412-94c4ca6e15f2 tempest-ServerMetadataTestJSON-21535063 tempest-ServerMetadataTestJSON-21535063-project-member] Lock "fd693e84-5f26-4382-af13-d703dbbee894" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 879.313091] env[68194]: DEBUG oslo_concurrency.lockutils [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Acquiring lock "e683863a-2b50-4681-a192-6955dc36562b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 879.313972] env[68194]: DEBUG oslo_concurrency.lockutils [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Lock "e683863a-2b50-4681-a192-6955dc36562b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 879.345623] env[68194]: DEBUG oslo_concurrency.lockutils [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Acquiring lock "f81311b2-917b-425b-8ad9-627f08548402" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 879.345932] env[68194]: DEBUG oslo_concurrency.lockutils [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Lock "f81311b2-917b-425b-8ad9-627f08548402" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 883.382348] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-351e913d-77f6-41ff-ac1d-c435043d9c91 tempest-ServerTagsTestJSON-1095445404 tempest-ServerTagsTestJSON-1095445404-project-member] Acquiring lock "fb4dd17b-dc02-4086-b450-9449212ed7b3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 883.382348] env[68194]: DEBUG oslo_concurrency.lockutils [None req-351e913d-77f6-41ff-ac1d-c435043d9c91 tempest-ServerTagsTestJSON-1095445404 tempest-ServerTagsTestJSON-1095445404-project-member] Lock "fb4dd17b-dc02-4086-b450-9449212ed7b3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 886.063815] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76374ad8-8d6b-4fbe-9bd1-9aacf2c0cfa6 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] Acquiring lock "9d6026ba-70bf-4824-a23d-434d63e5bb85" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 886.064267] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76374ad8-8d6b-4fbe-9bd1-9aacf2c0cfa6 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] Lock "9d6026ba-70bf-4824-a23d-434d63e5bb85" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 889.536009] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0bbd6e60-a10a-40c6-82f2-06b41e6470a1 tempest-ServerPasswordTestJSON-1857584348 tempest-ServerPasswordTestJSON-1857584348-project-member] Acquiring lock "4c2b5eb3-9dcc-4499-9242-209289723719" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 889.536359] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0bbd6e60-a10a-40c6-82f2-06b41e6470a1 tempest-ServerPasswordTestJSON-1857584348 tempest-ServerPasswordTestJSON-1857584348-project-member] Lock "4c2b5eb3-9dcc-4499-9242-209289723719" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 900.020883] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef6bb795-8214-4a7b-bd6d-fcde051a5a16 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Acquiring lock "24c3932c-dced-4218-8a64-a3183ffc82f1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 900.021196] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef6bb795-8214-4a7b-bd6d-fcde051a5a16 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Lock "24c3932c-dced-4218-8a64-a3183ffc82f1" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 902.701638] env[68194]: WARNING oslo_vmware.rw_handles [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 902.701638] env[68194]: ERROR oslo_vmware.rw_handles [ 902.702334] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/293966b5-4d84-4630-a131-7d703bacef75/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 902.704084] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 902.704335] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Copying Virtual Disk [datastore1] vmware_temp/293966b5-4d84-4630-a131-7d703bacef75/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/293966b5-4d84-4630-a131-7d703bacef75/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 902.704619] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-9ab94618-9484-4664-823b-59eb78300643 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 902.713767] env[68194]: DEBUG oslo_vmware.api [None 
req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Waiting for the task: (returnval){ [ 902.713767] env[68194]: value = "task-3466809" [ 902.713767] env[68194]: _type = "Task" [ 902.713767] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 902.721493] env[68194]: DEBUG oslo_vmware.api [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Task: {'id': task-3466809, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.223626] env[68194]: DEBUG oslo_vmware.exceptions [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 903.223896] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 903.224489] env[68194]: ERROR nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 903.224489] env[68194]: Faults: ['InvalidArgument'] [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Traceback (most recent call last): [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] yield resources [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] self.driver.spawn(context, instance, image_meta, [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] self._fetch_image_if_missing(context, vi) [ 903.224489] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] image_cache(vi, tmp_image_ds_loc) [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] vm_util.copy_virtual_disk( [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] session._wait_for_task(vmdk_copy_task) [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] return self.wait_for_task(task_ref) [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] return evt.wait() [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] result = hub.switch() [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 903.224924] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] return self.greenlet.switch() [ 903.225338] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 903.225338] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] self.f(*self.args, **self.kw) [ 903.225338] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 903.225338] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] raise exceptions.translate_fault(task_info.error) [ 903.225338] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 903.225338] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Faults: ['InvalidArgument'] [ 903.225338] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] [ 903.225338] env[68194]: INFO nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] 
Terminating instance [ 903.226323] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 903.226532] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.227149] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 903.227369] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 903.227601] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-62ed0c88-9d3f-433c-8040-0ca3292a079b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.229862] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-51bfeca9-e7a9-49b1-8496-a1afb2d596d8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.236636] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 903.236848] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-09481f31-7104-4bf0-8c70-f3ef6f55b6a7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.238931] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.239119] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 903.240080] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-774eed3d-226a-4340-aa13-af5fa1290d0b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.244892] env[68194]: DEBUG oslo_vmware.api [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Waiting for the task: (returnval){ [ 903.244892] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52be03a3-03cc-f685-7c52-f82ae4a2c436" [ 903.244892] env[68194]: _type = "Task" [ 903.244892] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.259265] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 903.259490] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Creating directory with path [datastore1] vmware_temp/4b5dc18e-65eb-4e35-9812-dcf255f79e5b/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 903.259696] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e7bdfee2-a91a-4964-8822-0c994c4b01e5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.280934] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Created directory with path [datastore1] vmware_temp/4b5dc18e-65eb-4e35-9812-dcf255f79e5b/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 903.281159] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Fetch image to [datastore1] vmware_temp/4b5dc18e-65eb-4e35-9812-dcf255f79e5b/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 903.281362] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/4b5dc18e-65eb-4e35-9812-dcf255f79e5b/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 903.282368] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66b06ac1-1916-40db-a22d-99f726442d25 {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.289792] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7047b702-b090-4282-8912-32ea828322c4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.299192] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85fdd885-7a9e-47e9-af85-debc45cd48c8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.333568] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7ec5012b-1172-4944-adaa-2da0d329a140 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.336324] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 903.336520] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 903.336695] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Deleting the datastore file [datastore1] 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 903.336954] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-b732374b-c957-4aa8-b2b9-7f879d173522 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.343667] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-4b552253-af74-4bcb-b6cc-d0e1b2605835 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 903.345460] env[68194]: DEBUG oslo_vmware.api [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Waiting for the task: (returnval){ [ 903.345460] env[68194]: value = "task-3466811" [ 903.345460] env[68194]: _type = "Task" [ 903.345460] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 903.352977] env[68194]: DEBUG oslo_vmware.api [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Task: {'id': task-3466811, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 903.366044] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 903.418444] env[68194]: DEBUG oslo_vmware.rw_handles [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4b5dc18e-65eb-4e35-9812-dcf255f79e5b/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 903.477403] env[68194]: DEBUG oslo_vmware.rw_handles [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 903.477613] env[68194]: DEBUG oslo_vmware.rw_handles [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/4b5dc18e-65eb-4e35-9812-dcf255f79e5b/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 903.856213] env[68194]: DEBUG oslo_vmware.api [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Task: {'id': task-3466811, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076589} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 903.856479] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 903.856660] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 903.856832] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 903.857412] env[68194]: INFO nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Took 0.63 seconds to destroy the instance on the hypervisor. [ 903.859107] env[68194]: DEBUG nova.compute.claims [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 903.859283] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 903.859506] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 904.252346] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07ee3b2e-f5da-4ab7-8d71-0d7f5bfdd93b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.260200] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b297c5f-762b-47fe-908b-1f2c319ea040 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.290676] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0213cc5-16c0-4221-8b3f-8290b9c66c58 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.298237] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58052081-2f20-42bf-9914-a230f8addea6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.312075] env[68194]: DEBUG nova.compute.provider_tree [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 904.321543] env[68194]: DEBUG nova.scheduler.client.report [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 904.341652] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.481s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 904.342083] env[68194]: ERROR nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 904.342083] env[68194]: Faults: ['InvalidArgument'] [ 904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Traceback (most recent call last): [ 904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] self.driver.spawn(context, instance, image_meta, [ 904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] self._vmops.spawn(context, instance, image_meta, injected_files, [ 904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] self._fetch_image_if_missing(context, vi) [ 904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] image_cache(vi, tmp_image_ds_loc) [ 904.342083] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] vm_util.copy_virtual_disk( [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] session._wait_for_task(vmdk_copy_task) [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] return self.wait_for_task(task_ref) [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] return evt.wait() [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] result = hub.switch() [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] return self.greenlet.switch() [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 904.342456] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] self.f(*self.args, **self.kw) [ 904.342842] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 904.342842] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] raise exceptions.translate_fault(task_info.error) [ 904.342842] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 904.342842] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Faults: ['InvalidArgument'] [ 904.342842] env[68194]: ERROR nova.compute.manager [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] [ 904.343385] env[68194]: DEBUG nova.compute.utils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] VimFaultException {{(pid=68194) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 904.345597] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Build of instance 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51 was re-scheduled: A specified parameter was not correct: fileType [ 904.345597] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 904.346234] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 904.346541] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 904.346809] env[68194]: DEBUG nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 904.347194] env[68194]: DEBUG nova.network.neutron [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 904.674615] env[68194]: DEBUG nova.network.neutron [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.686860] env[68194]: INFO nova.compute.manager [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Took 0.34 seconds to deallocate network for instance. 
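The traceback above bottoms out in nova.virt.vmwareapi.vm_util.copy_virtual_disk, which submits a CopyVirtualDisk_Task to vCenter and then blocks on it through oslo.vmware; the InvalidArgument/fileType fault is raised when the task result is polled, not when the SOAP call is made. The sketch below shows that call-and-wait pattern in isolation. It is illustrative only: the endpoint, credentials, datastore paths and the empty specs are placeholders, and this is not the Nova code itself.

    from oslo_vmware import api as vim_api
    from oslo_vmware import exceptions as vexc

    # Placeholder endpoint/credentials; real values come from nova.conf [vmware].
    session = vim_api.VMwareAPISession('vc.example.test', 'user@vsphere.local', 'secret',
                                       api_retry_count=3, task_poll_interval=0.5)
    try:
        # CopyVirtualDisk_Task is a VirtualDiskManager method; the source/dest names
        # stand in for the image-cache paths, and the datacenter/spec are stubbed.
        task = session.invoke_api(
            session.vim, 'CopyVirtualDisk_Task',
            session.vim.service_content.virtualDiskManager,
            sourceName='[datastore1] vmware_temp/tmp-sparse.vmdk',
            sourceDatacenter=None,   # datacenter moref in real code
            destName='[datastore1] vmware_base/image.vmdk',
            destSpec=None)           # a malformed copy spec is one way to hit InvalidArgument
        session.wait_for_task(task)  # polls the task; raises VimFaultException on task error
    except vexc.VimFaultException as e:
        print(str(e), e.fault_list)  # e.g. "... fileType" ['InvalidArgument']

Because the fault surfaces out of the task poll inside _build_and_run_instance, Nova aborts the claim, re-schedules the build and deallocates the network, which is exactly what the surrounding records show.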
[ 904.796717] env[68194]: INFO nova.scheduler.client.report [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Deleted allocations for instance 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51 [ 904.823017] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4faef6d2-1530-4a50-9c29-b368a6d5abef tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 285.860s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 904.823017] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 87.154s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 904.823017] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Acquiring lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 904.824377] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 904.824377] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 904.825492] env[68194]: INFO nova.compute.manager [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Terminating instance [ 904.827473] env[68194]: DEBUG nova.compute.manager [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 904.827690] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 904.828197] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-21ff82c4-5a6e-4036-8dc9-408e202f17e2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.838119] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37d8bb2a-9802-4c6e-82f2-96a1d6b582dd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 904.849964] env[68194]: DEBUG nova.compute.manager [None req-2b643a34-2c87-40bb-8b75-03c00a0e959f tempest-ServerRescueTestJSON-1260696433 tempest-ServerRescueTestJSON-1260696433-project-member] [instance: e2e34a7a-d419-4fc8-ae82-ff5874aa23d1] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 904.870614] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51 could not be found. [ 904.870929] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 904.871186] env[68194]: INFO nova.compute.manager [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Took 0.04 seconds to destroy the instance on the hypervisor. [ 904.871269] env[68194]: DEBUG oslo.service.loopingcall [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 904.871467] env[68194]: DEBUG nova.compute.manager [-] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 904.871587] env[68194]: DEBUG nova.network.neutron [-] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 904.882391] env[68194]: DEBUG nova.compute.manager [None req-2b643a34-2c87-40bb-8b75-03c00a0e959f tempest-ServerRescueTestJSON-1260696433 tempest-ServerRescueTestJSON-1260696433-project-member] [instance: e2e34a7a-d419-4fc8-ae82-ff5874aa23d1] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 904.895161] env[68194]: DEBUG nova.network.neutron [-] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 904.903409] env[68194]: INFO nova.compute.manager [-] [instance: 046c6f49-9cbd-4e5a-8a26-d0bc11a57d51] Took 0.03 seconds to deallocate network for instance. [ 904.908514] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b643a34-2c87-40bb-8b75-03c00a0e959f tempest-ServerRescueTestJSON-1260696433 tempest-ServerRescueTestJSON-1260696433-project-member] Lock "e2e34a7a-d419-4fc8-ae82-ff5874aa23d1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.421s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 904.916917] env[68194]: DEBUG nova.compute.manager [None req-1218b793-1eb3-4678-8d4c-bb645b0c9ed2 tempest-AttachInterfacesV270Test-476421757 tempest-AttachInterfacesV270Test-476421757-project-member] [instance: d9b69c9f-9ad6-4605-a7b5-54eed2035cc0] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 904.948936] env[68194]: DEBUG nova.compute.manager [None req-1218b793-1eb3-4678-8d4c-bb645b0c9ed2 tempest-AttachInterfacesV270Test-476421757 tempest-AttachInterfacesV270Test-476421757-project-member] [instance: d9b69c9f-9ad6-4605-a7b5-54eed2035cc0] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 904.969743] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1218b793-1eb3-4678-8d4c-bb645b0c9ed2 tempest-AttachInterfacesV270Test-476421757 tempest-AttachInterfacesV270Test-476421757-project-member] Lock "d9b69c9f-9ad6-4605-a7b5-54eed2035cc0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 231.477s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 904.979540] env[68194]: DEBUG nova.compute.manager [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] [instance: afe91255-b954-4518-b0ca-a1f4ddcfd9ba] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 905.008134] env[68194]: DEBUG nova.compute.manager [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] [instance: afe91255-b954-4518-b0ca-a1f4ddcfd9ba] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 905.012617] env[68194]: DEBUG oslo_concurrency.lockutils [None req-63cb1dff-dbcf-4662-80fa-502233fef443 tempest-ServerExternalEventsTest-1477763008 tempest-ServerExternalEventsTest-1477763008-project-member] Lock "046c6f49-9cbd-4e5a-8a26-d0bc11a57d51" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.190s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 905.030325] env[68194]: DEBUG oslo_concurrency.lockutils [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Lock "afe91255-b954-4518-b0ca-a1f4ddcfd9ba" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.653s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 905.039025] env[68194]: DEBUG nova.compute.manager [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] [instance: cfa6ea83-c10f-4c87-860a-b26fb80e5f12] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 905.062380] env[68194]: DEBUG nova.compute.manager [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] [instance: cfa6ea83-c10f-4c87-860a-b26fb80e5f12] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 905.081991] env[68194]: DEBUG oslo_concurrency.lockutils [None req-67740fa2-2ee2-49b0-86fc-573f46319b6e tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Lock "cfa6ea83-c10f-4c87-860a-b26fb80e5f12" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.668s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 905.090449] env[68194]: DEBUG nova.compute.manager [None req-9cdb4304-f2e0-404e-8659-c4635e91ca7b tempest-ServerGroupTestJSON-627746257 tempest-ServerGroupTestJSON-627746257-project-member] [instance: edfef42a-a968-4c36-92a4-e608037eab3f] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 905.112920] env[68194]: DEBUG nova.compute.manager [None req-9cdb4304-f2e0-404e-8659-c4635e91ca7b tempest-ServerGroupTestJSON-627746257 tempest-ServerGroupTestJSON-627746257-project-member] [instance: edfef42a-a968-4c36-92a4-e608037eab3f] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 905.137580] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9cdb4304-f2e0-404e-8659-c4635e91ca7b tempest-ServerGroupTestJSON-627746257 tempest-ServerGroupTestJSON-627746257-project-member] Lock "edfef42a-a968-4c36-92a4-e608037eab3f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.195s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 905.148461] env[68194]: DEBUG nova.compute.manager [None req-b29f273b-c9cb-40ee-ac4f-1992fd4c4cc6 tempest-AttachInterfacesUnderV243Test-948469958 tempest-AttachInterfacesUnderV243Test-948469958-project-member] [instance: bcaa671a-38d8-45a5-b772-7d856594f700] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 905.172580] env[68194]: DEBUG nova.compute.manager [None req-b29f273b-c9cb-40ee-ac4f-1992fd4c4cc6 tempest-AttachInterfacesUnderV243Test-948469958 tempest-AttachInterfacesUnderV243Test-948469958-project-member] [instance: bcaa671a-38d8-45a5-b772-7d856594f700] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 905.193891] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b29f273b-c9cb-40ee-ac4f-1992fd4c4cc6 tempest-AttachInterfacesUnderV243Test-948469958 tempest-AttachInterfacesUnderV243Test-948469958-project-member] Lock "bcaa671a-38d8-45a5-b772-7d856594f700" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.094s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 905.202636] env[68194]: DEBUG nova.compute.manager [None req-7d03cc73-1071-425b-91c3-da8f2538cc87 tempest-ServersTestBootFromVolume-1892992509 tempest-ServersTestBootFromVolume-1892992509-project-member] [instance: faf174e0-4af8-4f58-be5d-2f8915b0d58b] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 905.229088] env[68194]: DEBUG nova.compute.manager [None req-7d03cc73-1071-425b-91c3-da8f2538cc87 tempest-ServersTestBootFromVolume-1892992509 tempest-ServersTestBootFromVolume-1892992509-project-member] [instance: faf174e0-4af8-4f58-be5d-2f8915b0d58b] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 905.250686] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7d03cc73-1071-425b-91c3-da8f2538cc87 tempest-ServersTestBootFromVolume-1892992509 tempest-ServersTestBootFromVolume-1892992509-project-member] Lock "faf174e0-4af8-4f58-be5d-2f8915b0d58b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.485s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 905.259341] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 905.312342] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 905.312644] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 905.314301] env[68194]: INFO nova.compute.claims [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 905.687240] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3f364692-5652-4323-b13e-bdf3a0ad582e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.698799] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5658564-4894-4a49-907c-48f5ab1f5824 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.728403] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-415e9155-c40f-4474-b0ea-1fc0c132ee42 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.735086] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a59e7413-250f-4edc-8a61-cc02fd220af3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.749195] env[68194]: DEBUG nova.compute.provider_tree [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 905.760675] env[68194]: DEBUG nova.scheduler.client.report [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 905.774037] env[68194]: DEBUG 
oslo_concurrency.lockutils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.459s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 905.774037] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 905.810143] env[68194]: DEBUG nova.compute.utils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 905.810426] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 905.810727] env[68194]: DEBUG nova.network.neutron [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 905.820240] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Start building block device mappings for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 905.888021] env[68194]: INFO nova.virt.block_device [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Booting with volume 173e5dc3-7861-4084-a5f6-016843b6f865 at /dev/sda [ 905.892889] env[68194]: DEBUG nova.policy [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'b6e248624fa045aa9de68d9ec18cfbc3', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5796a7d0c1f45329e0ee1b50b53d90d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 905.949125] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-05537005-fefd-4a5b-ba67-124c2c382bc5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.958349] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f47020e3-e4bd-4c43-a051-e1053e35872f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.986900] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-5e1fa55d-4fbc-4c0a-917d-4032a27d6144 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 905.994966] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6dba1d0a-aa94-4125-8402-a7907eb1b3d7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.022303] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a53d05d5-2931-4ff0-9249-43f21c4740c8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.028827] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3947154a-5e7b-4bcc-a08f-cd52c6aa04e8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.041758] env[68194]: DEBUG nova.virt.block_device [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Updating existing volume attachment record: 3b807675-215b-428e-a26d-b978e4d8f2ed {{(pid=68194) _volume_attach /opt/stack/nova/nova/virt/block_device.py:631}} [ 906.270690] env[68194]: DEBUG nova.network.neutron [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Successfully created port: 862157ec-2f0a-4023-b936-74591ce30a3c {{(pid=68194) _create_port_minimal 
/opt/stack/nova/nova/network/neutron.py:548}} [ 906.307946] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 906.308720] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum=,container_format=,created_at=,direct_url=,disk_format=,id=,min_disk=0,min_ram=0,name=,owner=,properties=ImageMetaProps,protected=,size=1073741824,status='active',tags=,updated_at=,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 906.308908] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 906.309367] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 906.309367] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 906.309528] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 906.309683] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 906.309934] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 906.310113] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 
tempest-ServerActionsV293TestJSON-2013597729-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 906.310285] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 906.310453] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 906.310627] env[68194]: DEBUG nova.virt.hardware [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 906.312241] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8aac2b14-c0b4-49b7-84ff-f94bbe417f7e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.324021] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b218ba39-6df9-42d8-b840-8791b3dc3de6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 906.926550] env[68194]: DEBUG nova.network.neutron [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Successfully updated port: 862157ec-2f0a-4023-b936-74591ce30a3c {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 906.941524] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Acquiring lock "refresh_cache-a6920ad4-bf1c-4daa-9b9a-81e782c88a20" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 906.941524] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Acquired lock "refresh_cache-a6920ad4-bf1c-4daa-9b9a-81e782c88a20" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 906.941524] env[68194]: DEBUG nova.network.neutron [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 906.991326] env[68194]: DEBUG nova.network.neutron [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: 
a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 906.999320] env[68194]: DEBUG nova.compute.manager [req-47692e37-9a76-4cae-ad38-39984948f1f7 req-9c03f91e-558a-4dd9-aa40-fd9779cceda2 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Received event network-vif-plugged-862157ec-2f0a-4023-b936-74591ce30a3c {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 906.999570] env[68194]: DEBUG oslo_concurrency.lockutils [req-47692e37-9a76-4cae-ad38-39984948f1f7 req-9c03f91e-558a-4dd9-aa40-fd9779cceda2 service nova] Acquiring lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 906.999737] env[68194]: DEBUG oslo_concurrency.lockutils [req-47692e37-9a76-4cae-ad38-39984948f1f7 req-9c03f91e-558a-4dd9-aa40-fd9779cceda2 service nova] Lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 906.999905] env[68194]: DEBUG oslo_concurrency.lockutils [req-47692e37-9a76-4cae-ad38-39984948f1f7 req-9c03f91e-558a-4dd9-aa40-fd9779cceda2 service nova] Lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 907.000114] env[68194]: DEBUG nova.compute.manager [req-47692e37-9a76-4cae-ad38-39984948f1f7 req-9c03f91e-558a-4dd9-aa40-fd9779cceda2 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] No waiting events found dispatching network-vif-plugged-862157ec-2f0a-4023-b936-74591ce30a3c {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 907.003213] env[68194]: WARNING nova.compute.manager [req-47692e37-9a76-4cae-ad38-39984948f1f7 req-9c03f91e-558a-4dd9-aa40-fd9779cceda2 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Received unexpected event network-vif-plugged-862157ec-2f0a-4023-b936-74591ce30a3c for instance with vm_state building and task_state spawning. 
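The "Acquiring lock / acquired / released" records around compute_resources, the per-instance -events lock and refresh_cache-* all come from oslo.concurrency's lockutils helpers; the 'inner' and 'lock' source references in the records are its decorator wrapper and context manager. A minimal sketch of both usage patterns, with illustrative lock names and functions rather than Nova's actual ones:

    from oslo_concurrency import lockutils

    # Decorator form: the 'inner' wrapper is what emits the acquire/release DEBUG lines.
    synchronized = lockutils.synchronized_with_prefix('nova-')

    @synchronized('compute_resources')
    def instance_claim(instance_uuid):
        # Runs with the named lock held for the duration of the call.
        return instance_uuid

    # Context-manager form, as used for the per-instance network-cache lock.
    with lockutils.lock('refresh_cache-a6920ad4-bf1c-4daa-9b9a-81e782c88a20'):
        pass  # read/refresh the instance network info cache while the lock is held

The WARNING just above about an unexpected network-vif-plugged event simply means the Neutron event arrived before the driver had registered a waiter for it, so it is logged as unexpected and dropped.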
[ 907.201129] env[68194]: DEBUG nova.network.neutron [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Updating instance_info_cache with network_info: [{"id": "862157ec-2f0a-4023-b936-74591ce30a3c", "address": "fa:16:3e:3a:19:b4", "network": {"id": "f9883d21-a9aa-4d49-a56a-f4d0b032a15a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2085008165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5796a7d0c1f45329e0ee1b50b53d90d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap862157ec-2f", "ovs_interfaceid": "862157ec-2f0a-4023-b936-74591ce30a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 907.217188] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Releasing lock "refresh_cache-a6920ad4-bf1c-4daa-9b9a-81e782c88a20" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 907.217709] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Instance network_info: |[{"id": "862157ec-2f0a-4023-b936-74591ce30a3c", "address": "fa:16:3e:3a:19:b4", "network": {"id": "f9883d21-a9aa-4d49-a56a-f4d0b032a15a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2085008165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5796a7d0c1f45329e0ee1b50b53d90d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap862157ec-2f", "ovs_interfaceid": "862157ec-2f0a-4023-b936-74591ce30a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 907.220240] env[68194]: DEBUG 
nova.virt.vmwareapi.vmops [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:3a:19:b4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '9d6abf71-e893-4dec-9a05-0fe7d6c0624e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '862157ec-2f0a-4023-b936-74591ce30a3c', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 907.228921] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Creating folder: Project (e5796a7d0c1f45329e0ee1b50b53d90d). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 907.234126] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a88dca24-464c-4a8d-adfd-f6abf99d65f8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.246391] env[68194]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. [ 907.246564] env[68194]: DEBUG oslo_vmware.api [-] Fault list: [DuplicateName] {{(pid=68194) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 907.246899] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Folder already exists: Project (e5796a7d0c1f45329e0ee1b50b53d90d). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1599}} [ 907.247111] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Creating folder: Instances. Parent ref: group-v692464. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 907.247346] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-f17ef8a7-8a10-4253-9fb1-ab828c4f4ce8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.257735] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Created folder: Instances in parent group-v692464. [ 907.257896] env[68194]: DEBUG oslo.service.loopingcall [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 907.259219] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 907.259219] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-07f094e3-b220-4c89-b31c-c60e509c0eca {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.281225] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 907.281225] env[68194]: value = "task-3466814" [ 907.281225] env[68194]: _type = "Task" [ 907.281225] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.289507] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466814, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 907.557111] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Acquiring lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 907.792488] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466814, 'name': CreateVM_Task, 'duration_secs': 0.29901} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 907.792656] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 907.793296] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Block device information present: {'root_device_name': '/dev/sda', 'image': [], 'ephemerals': [], 'block_device_mapping': [{'disk_bus': None, 'mount_device': '/dev/sda', 'device_type': None, 'guest_format': None, 'boot_index': 0, 'attachment_id': '3b807675-215b-428e-a26d-b978e4d8f2ed', 'delete_on_termination': True, 'connection_info': {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-692467', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'name': 'volume-173e5dc3-7861-4084-a5f6-016843b6f865', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6920ad4-bf1c-4daa-9b9a-81e782c88a20', 'attached_at': '', 'detached_at': '', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'serial': '173e5dc3-7861-4084-a5f6-016843b6f865'}, 'volume_type': None}], 'swap': None} {{(pid=68194) spawn /opt/stack/nova/nova/virt/vmwareapi/vmops.py:799}} [ 907.793555] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Root volume attach. 
Driver type: vmdk {{(pid=68194) attach_root_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:661}} [ 907.794425] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bbafe1b-b976-4b43-b8bd-ba1d7f4c83e1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.804186] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d54e244b-65d7-455c-a9a2-44831a78bc64 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.810067] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45a63ebc-f84f-4539-b566-bab5f4486752 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.817012] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.RelocateVM_Task with opID=oslo.vmware-df9ae541-17ea-45e0-ae07-c79fd1cb9ca4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 907.824371] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for the task: (returnval){ [ 907.824371] env[68194]: value = "task-3466815" [ 907.824371] env[68194]: _type = "Task" [ 907.824371] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 907.832216] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466815, 'name': RelocateVM_Task} progress is 5%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.339978] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466815, 'name': RelocateVM_Task, 'duration_secs': 0.0255} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.340316] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Volume attach. 
Driver type: vmdk {{(pid=68194) attach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:439}} [ 908.340525] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] _attach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-692467', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'name': 'volume-173e5dc3-7861-4084-a5f6-016843b6f865', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6920ad4-bf1c-4daa-9b9a-81e782c88a20', 'attached_at': '', 'detached_at': '', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'serial': '173e5dc3-7861-4084-a5f6-016843b6f865'} {{(pid=68194) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:336}} [ 908.341322] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff61114d-73bd-49f4-9287-c39ff3e27043 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.365850] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050be673-475d-4b3b-b9dc-3d44ae2efe90 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.388708] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Reconfiguring VM instance instance-0000001e to attach disk [datastore1] volume-173e5dc3-7861-4084-a5f6-016843b6f865/volume-173e5dc3-7861-4084-a5f6-016843b6f865.vmdk or device None with type thin {{(pid=68194) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:81}} [ 908.389015] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-a3484c21-bb0c-4184-91d9-81e7c4904bd1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.408913] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for the task: (returnval){ [ 908.408913] env[68194]: value = "task-3466816" [ 908.408913] env[68194]: _type = "Task" [ 908.408913] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.418251] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466816, 'name': ReconfigVM_Task} progress is 5%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 908.919909] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466816, 'name': ReconfigVM_Task, 'duration_secs': 0.260667} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 908.919909] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Reconfigured VM instance instance-0000001e to attach disk [datastore1] volume-173e5dc3-7861-4084-a5f6-016843b6f865/volume-173e5dc3-7861-4084-a5f6-016843b6f865.vmdk or device None with type thin {{(pid=68194) attach_disk_to_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:88}} [ 908.924385] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-f09ac00e-c8d0-4edb-91bf-6004541295fb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 908.942904] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for the task: (returnval){ [ 908.942904] env[68194]: value = "task-3466817" [ 908.942904] env[68194]: _type = "Task" [ 908.942904] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 908.953830] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466817, 'name': ReconfigVM_Task} progress is 10%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.266892] env[68194]: DEBUG nova.compute.manager [req-a22012a1-2dce-4785-ad40-6b86dc584149 req-830ee110-f7e9-4f81-a2b6-fe57a013bcb4 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Received event network-changed-862157ec-2f0a-4023-b936-74591ce30a3c {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 909.267105] env[68194]: DEBUG nova.compute.manager [req-a22012a1-2dce-4785-ad40-6b86dc584149 req-830ee110-f7e9-4f81-a2b6-fe57a013bcb4 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Refreshing instance network info cache due to event network-changed-862157ec-2f0a-4023-b936-74591ce30a3c. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 909.267315] env[68194]: DEBUG oslo_concurrency.lockutils [req-a22012a1-2dce-4785-ad40-6b86dc584149 req-830ee110-f7e9-4f81-a2b6-fe57a013bcb4 service nova] Acquiring lock "refresh_cache-a6920ad4-bf1c-4daa-9b9a-81e782c88a20" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 909.267667] env[68194]: DEBUG oslo_concurrency.lockutils [req-a22012a1-2dce-4785-ad40-6b86dc584149 req-830ee110-f7e9-4f81-a2b6-fe57a013bcb4 service nova] Acquired lock "refresh_cache-a6920ad4-bf1c-4daa-9b9a-81e782c88a20" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 909.267667] env[68194]: DEBUG nova.network.neutron [req-a22012a1-2dce-4785-ad40-6b86dc584149 req-830ee110-f7e9-4f81-a2b6-fe57a013bcb4 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Refreshing network info cache for port 862157ec-2f0a-4023-b936-74591ce30a3c {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 909.453237] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466817, 'name': ReconfigVM_Task, 'duration_secs': 0.125004} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 909.453693] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Attached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-692467', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'name': 'volume-173e5dc3-7861-4084-a5f6-016843b6f865', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6920ad4-bf1c-4daa-9b9a-81e782c88a20', 'attached_at': '', 'detached_at': '', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'serial': '173e5dc3-7861-4084-a5f6-016843b6f865'} {{(pid=68194) _attach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:361}} [ 909.454361] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.Rename_Task with opID=oslo.vmware-36c1ec36-2755-4054-a984-ebf4bdc4e010 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 909.460431] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for the task: (returnval){ [ 909.460431] env[68194]: value = "task-3466818" [ 909.460431] env[68194]: _type = "Task" [ 909.460431] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 909.468882] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466818, 'name': Rename_Task} progress is 5%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 909.812465] env[68194]: DEBUG nova.network.neutron [req-a22012a1-2dce-4785-ad40-6b86dc584149 req-830ee110-f7e9-4f81-a2b6-fe57a013bcb4 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Updated VIF entry in instance network info cache for port 862157ec-2f0a-4023-b936-74591ce30a3c. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 909.812818] env[68194]: DEBUG nova.network.neutron [req-a22012a1-2dce-4785-ad40-6b86dc584149 req-830ee110-f7e9-4f81-a2b6-fe57a013bcb4 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Updating instance_info_cache with network_info: [{"id": "862157ec-2f0a-4023-b936-74591ce30a3c", "address": "fa:16:3e:3a:19:b4", "network": {"id": "f9883d21-a9aa-4d49-a56a-f4d0b032a15a", "bridge": "br-int", "label": "tempest-ServerActionsV293TestJSON-2085008165-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5796a7d0c1f45329e0ee1b50b53d90d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "9d6abf71-e893-4dec-9a05-0fe7d6c0624e", "external-id": "nsx-vlan-transportzone-133", "segmentation_id": 133, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap862157ec-2f", "ovs_interfaceid": "862157ec-2f0a-4023-b936-74591ce30a3c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 909.827026] env[68194]: DEBUG oslo_concurrency.lockutils [req-a22012a1-2dce-4785-ad40-6b86dc584149 req-830ee110-f7e9-4f81-a2b6-fe57a013bcb4 service nova] Releasing lock "refresh_cache-a6920ad4-bf1c-4daa-9b9a-81e782c88a20" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 909.972691] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466818, 'name': Rename_Task} progress is 14%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.471149] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466818, 'name': Rename_Task, 'duration_secs': 0.677406} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.471444] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Powering on the VM {{(pid=68194) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1442}} [ 910.471650] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOnVM_Task with opID=oslo.vmware-163fd7d7-f0e3-47e6-b8cc-c1cf7f2774b3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 910.478371] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for the task: (returnval){ [ 910.478371] env[68194]: value = "task-3466819" [ 910.478371] env[68194]: _type = "Task" [ 910.478371] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 910.486538] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466819, 'name': PowerOnVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 910.988248] env[68194]: DEBUG oslo_vmware.api [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466819, 'name': PowerOnVM_Task, 'duration_secs': 0.446588} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 910.988600] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Powered on the VM {{(pid=68194) power_on_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1448}} [ 910.989315] env[68194]: INFO nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Took 4.68 seconds to spawn the instance on the hypervisor. 
[ 910.989315] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Checking state {{(pid=68194) _get_power_state /opt/stack/nova/nova/compute/manager.py:1766}} [ 910.989890] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a70d3ce-01b1-479b-839d-1143db991ea1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.037922] env[68194]: DEBUG nova.compute.utils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Conflict updating instance a6920ad4-bf1c-4daa-9b9a-81e782c88a20. Expected: {'task_state': ['spawning']}. Actual: {'task_state': 'deleting'} {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 911.039573] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Instance disappeared during build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 911.039678] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 911.039916] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 911.040697] env[68194]: DEBUG nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 911.041048] env[68194]: DEBUG nova.network.neutron [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 911.566761] env[68194]: DEBUG nova.network.neutron [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.577511] env[68194]: DEBUG nova.compute.manager [req-985ea9df-7e7f-4fa7-a923-cb7505135319 req-f8e4a02c-5b73-45c4-a053-319a7ac3a6f7 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Received event network-vif-deleted-862157ec-2f0a-4023-b936-74591ce30a3c {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 911.577721] env[68194]: INFO nova.compute.manager [req-985ea9df-7e7f-4fa7-a923-cb7505135319 req-f8e4a02c-5b73-45c4-a053-319a7ac3a6f7 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Neutron deleted interface 862157ec-2f0a-4023-b936-74591ce30a3c; detaching it from the instance and deleting it from the info cache [ 911.578050] env[68194]: DEBUG nova.network.neutron [req-985ea9df-7e7f-4fa7-a923-cb7505135319 req-f8e4a02c-5b73-45c4-a053-319a7ac3a6f7 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 911.590570] env[68194]: INFO nova.compute.manager [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Took 0.55 seconds to deallocate network for instance. 
[ 911.595552] env[68194]: DEBUG oslo_concurrency.lockutils [req-985ea9df-7e7f-4fa7-a923-cb7505135319 req-f8e4a02c-5b73-45c4-a053-319a7ac3a6f7 service nova] Acquiring lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 911.702898] env[68194]: INFO nova.scheduler.client.report [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Deleted allocations for instance a6920ad4-bf1c-4daa-9b9a-81e782c88a20 [ 911.703206] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8a06d015-41bf-4eb0-947c-6ddf23602b4f tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 200.658s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 911.704513] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 4.148s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 911.704744] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Acquiring lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 911.704949] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 911.705849] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 911.707892] env[68194]: INFO nova.compute.manager [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Terminating instance [ 911.710870] env[68194]: DEBUG nova.compute.manager [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 911.711096] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Powering off the VM {{(pid=68194) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1502}} [ 911.711332] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.PowerOffVM_Task with opID=oslo.vmware-25e01a4d-a920-4d31-b2a2-b2fc109ecb05 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 911.718646] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for the task: (returnval){ [ 911.718646] env[68194]: value = "task-3466820" [ 911.718646] env[68194]: _type = "Task" [ 911.718646] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 911.722949] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 911.731300] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466820, 'name': PowerOffVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 911.782312] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 911.783832] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 911.789131] env[68194]: INFO nova.compute.claims [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 912.232634] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466820, 'name': PowerOffVM_Task, 'duration_secs': 0.186666} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.235196] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Powered off the VM {{(pid=68194) power_off_instance /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1507}} [ 912.235399] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Volume detach. Driver type: vmdk {{(pid=68194) detach_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:646}} [ 912.235769] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] _detach_volume_vmdk: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-692467', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'name': 'volume-173e5dc3-7861-4084-a5f6-016843b6f865', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6920ad4-bf1c-4daa-9b9a-81e782c88a20', 'attached_at': '', 'detached_at': '', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'serial': '173e5dc3-7861-4084-a5f6-016843b6f865'} {{(pid=68194) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:571}} [ 912.236676] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ad25434-2495-4f7f-9548-1e137e49b981 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.256150] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ab655c4d-527a-4498-bff4-c62c7f1a2895 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.269026] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16f27aea-3266-4cee-ab1c-6c65504d5cba {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.290136] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bfe4d484-42d2-4c97-868c-ffe7dc890ece {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.293485] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f04f33d8-f80d-4357-9ed8-3d864d60c95f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.311909] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] The volume has not been displaced from its original location: [datastore1] volume-173e5dc3-7861-4084-a5f6-016843b6f865/volume-173e5dc3-7861-4084-a5f6-016843b6f865.vmdk. No consolidation needed. 
{{(pid=68194) _consolidate_vmdk_volume /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:504}} [ 912.317184] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Reconfiguring VM instance instance-0000001e to detach disk 2000 {{(pid=68194) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:122}} [ 912.321343] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afa33186-37b5-48b5-b3e1-f3dea357b216 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.321460] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-3ac44db6-0819-4d1e-924c-0ca39c67d324 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.362825] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef6b94fe-761e-4a62-815e-e7488a912f1e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.368017] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for the task: (returnval){ [ 912.368017] env[68194]: value = "task-3466821" [ 912.368017] env[68194]: _type = "Task" [ 912.368017] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.372785] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45ab5dc3-5f41-43b5-a372-34ae801b76ad {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.380999] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466821, 'name': ReconfigVM_Task} progress is 5%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.390628] env[68194]: DEBUG nova.compute.provider_tree [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 912.401460] env[68194]: DEBUG nova.scheduler.client.report [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 912.422299] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.640s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 912.422837] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 912.469061] env[68194]: DEBUG nova.compute.utils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 912.470408] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Not allocating networking since 'none' was specified. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 912.491419] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 912.567450] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 912.599043] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 912.599297] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 912.599452] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 912.602510] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 912.602696] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 912.602858] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 912.603098] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 912.603260] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 912.603427] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 
tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 912.603614] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 912.603800] env[68194]: DEBUG nova.virt.hardware [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 912.606520] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80b72c6-0953-4927-965a-4dbc0b0d01a8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.616023] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f11715d8-ce99-4296-bca6-0bd86e4884ce {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.634900] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Instance VIF info [] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 912.642302] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Creating folder: Project (ef615e729f5e4b79b48e64ab5f737f6f). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 912.643954] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7bf2aab1-13bb-4848-bd87-3b1d3faf652a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.652903] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Created folder: Project (ef615e729f5e4b79b48e64ab5f737f6f) in parent group-v692426. [ 912.653121] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Creating folder: Instances. Parent ref: group-v692482. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 912.653620] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d5b5750b-732f-46d6-9341-0c7f1dd51309 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.663032] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Created folder: Instances in parent group-v692482. 
[ 912.663228] env[68194]: DEBUG oslo.service.loopingcall [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 912.663408] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 912.663696] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-34170606-1933-4609-8820-bd25851b40f2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.681545] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 912.681545] env[68194]: value = "task-3466824" [ 912.681545] env[68194]: _type = "Task" [ 912.681545] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.690023] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466824, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 912.881473] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466821, 'name': ReconfigVM_Task, 'duration_secs': 0.176916} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 912.881737] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Reconfigured VM instance instance-0000001e to detach disk 2000 {{(pid=68194) detach_disk_from_vm /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:127}} [ 912.887053] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.ReconfigVM_Task with opID=oslo.vmware-efc6efce-cb51-4e7d-a48e-9e6799650007 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 912.908288] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for the task: (returnval){ [ 912.908288] env[68194]: value = "task-3466825" [ 912.908288] env[68194]: _type = "Task" [ 912.908288] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 912.918954] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466825, 'name': ReconfigVM_Task} progress is 6%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.191162] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466824, 'name': CreateVM_Task, 'duration_secs': 0.274846} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.191419] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 913.191874] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 913.192084] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 913.192420] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 913.192669] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-25d029df-69e2-4617-bc19-bc507f03d4a9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.197399] env[68194]: DEBUG oslo_vmware.api [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Waiting for the task: (returnval){ [ 913.197399] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52095ab2-85a1-1018-a996-bd85ed36d616" [ 913.197399] env[68194]: _type = "Task" [ 913.197399] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.205222] env[68194]: DEBUG oslo_vmware.api [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52095ab2-85a1-1018-a996-bd85ed36d616, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.422490] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466825, 'name': ReconfigVM_Task, 'duration_secs': 0.122185} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 913.422490] env[68194]: DEBUG nova.virt.vmwareapi.volumeops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Detached VMDK: {'driver_volume_type': 'vmdk', 'data': {'volume': 'vm-692467', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'name': 'volume-173e5dc3-7861-4084-a5f6-016843b6f865', 'profile_id': None, 'qos_specs': None, 'access_mode': 'rw', 'encrypted': False, 'cacheable': False}, 'status': 'reserved', 'instance': 'a6920ad4-bf1c-4daa-9b9a-81e782c88a20', 'attached_at': '', 'detached_at': '', 'volume_id': '173e5dc3-7861-4084-a5f6-016843b6f865', 'serial': '173e5dc3-7861-4084-a5f6-016843b6f865'} {{(pid=68194) _detach_volume_vmdk /opt/stack/nova/nova/virt/vmwareapi/volumeops.py:605}} [ 913.422490] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 913.423282] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f234b006-529a-4854-933a-9030bf6f0a23 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.429896] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 913.430148] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9e865007-1018-43f3-a3a5-f2d71566ca78 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.487746] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 913.488142] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 913.488428] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Deleting the datastore file [datastore1] a6920ad4-bf1c-4daa-9b9a-81e782c88a20 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 913.488840] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e169c663-bc28-4d15-b666-1c529e999b34 {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 913.496142] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for the task: (returnval){ [ 913.496142] env[68194]: value = "task-3466827" [ 913.496142] env[68194]: _type = "Task" [ 913.496142] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 913.506198] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466827, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 913.711392] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 913.712154] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 913.712554] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 914.011495] env[68194]: DEBUG oslo_vmware.api [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Task: {'id': task-3466827, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.109206} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 914.011854] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 914.012523] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 914.012784] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 914.013480] env[68194]: INFO nova.compute.manager [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Took 2.30 seconds to destroy the instance on the hypervisor. [ 914.013807] env[68194]: DEBUG oslo.service.loopingcall [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 914.014217] env[68194]: DEBUG nova.compute.manager [-] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 914.014354] env[68194]: DEBUG nova.network.neutron [-] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 914.043595] env[68194]: DEBUG nova.network.neutron [-] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 914.056054] env[68194]: INFO nova.compute.manager [-] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Took 0.04 seconds to deallocate network for instance. [ 914.133173] env[68194]: INFO nova.compute.manager [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Took 0.08 seconds to detach 1 volumes for instance. 
[ 914.135691] env[68194]: DEBUG nova.compute.manager [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Deleting volume: 173e5dc3-7861-4084-a5f6-016843b6f865 {{(pid=68194) _cleanup_volumes /opt/stack/nova/nova/compute/manager.py:3221}} [ 914.220313] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.update_usage" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 914.220618] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 914.221046] env[68194]: DEBUG nova.objects.instance [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lazy-loading 'resources' on Instance uuid a6920ad4-bf1c-4daa-9b9a-81e782c88a20 {{(pid=68194) obj_load_attr /opt/stack/nova/nova/objects/instance.py:1152}} [ 914.571809] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-038edad8-3868-43ec-b4af-d466814b8465 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.579683] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-225d3f5d-c06b-4726-a130-7026eb36885e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.608948] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4091c30-a89a-4ce0-9f5f-cdd359def0fe {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.616244] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c13d1f15-f7fb-4571-92b3-d3bd09157f16 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.630708] env[68194]: DEBUG nova.compute.provider_tree [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 914.639747] env[68194]: DEBUG nova.scheduler.client.report [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 914.655060] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.update_usage" :: held 0.434s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 914.721348] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e6256187-2f56-4b44-a62b-ea766f708e87 tempest-ServerActionsV293TestJSON-2013597729 tempest-ServerActionsV293TestJSON-2013597729-project-member] Lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 3.017s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 914.722269] env[68194]: DEBUG oslo_concurrency.lockutils [req-985ea9df-7e7f-4fa7-a923-cb7505135319 req-f8e4a02c-5b73-45c4-a053-319a7ac3a6f7 service nova] Acquired lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 914.723243] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a54f47c5-1a3c-4804-a367-71d8b5e0088e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.731965] env[68194]: WARNING suds.client [-] Web service reported a SOAP processing fault using an unexpected HTTP status code 200. Reporting as an internal server error. 
[ 914.732200] env[68194]: DEBUG oslo_vmware.api [-] Fault list: [ManagedObjectNotFound] {{(pid=68194) _invoke_api /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:337}} [ 914.733502] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b08e2bd7-cd99-4782-b568-efc441bc9115 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.742266] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0eea3b82-aa8d-4c32-89b4-88e04fc8f2f8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 914.780037] env[68194]: ERROR root [req-985ea9df-7e7f-4fa7-a923-cb7505135319 req-f8e4a02c-5b73-45c4-a053-319a7ac3a6f7 service nova] Original exception being dropped: ['Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 377, in request_handler\n response = request(managed_object, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 586, in __call__\n return client.invoke(args, kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 728, in invoke\n result = self.send(soapenv, timeout=timeout)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 777, in send\n return self.process_reply(reply.message, None, None)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/suds/client.py", line 840, in process_reply\n raise WebFault(fault, replyroot)\n', "suds.WebFault: Server raised fault: 'The object 'vim.VirtualMachine:vm-692481' has already been deleted or has not been completely created'\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 301, in _invoke_api\n return api_method(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 480, in get_object_property\n props = get_object_properties(vim, moref, [property_name],\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/vim_util.py", line 360, in get_object_properties\n retrieve_result = vim.RetrievePropertiesEx(\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py", line 413, in request_handler\n raise exceptions.VimFaultException(fault_list, fault_string,\n', "oslo_vmware.exceptions.VimFaultException: The object 'vim.VirtualMachine:vm-692481' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-692481' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-692481'}\n", '\nDuring handling of the above exception, another exception occurred:\n\n', 'Traceback (most recent call last):\n', ' File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 123, in _call_method\n return self.invoke_api(module, method, self.vim, *args,\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 358, in invoke_api\n return _invoke_api(module, method, *args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 122, in func\n return evt.wait()\n', ' File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait\n result = hub.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch\n return self.greenlet.switch()\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 122, in _inner\n idle = self.f(*self.args, **self.kw)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 96, in _func\n result = f(*args, **kwargs)\n', ' File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 341, in _invoke_api\n raise clazz(str(excep),\n', "oslo_vmware.exceptions.ManagedObjectNotFoundException: The object 'vim.VirtualMachine:vm-692481' has already been deleted or has not been completely created\nCause: Server raised fault: 'The object 'vim.VirtualMachine:vm-692481' has already been deleted or has not been completely created'\nFaults: [ManagedObjectNotFound]\nDetails: {'obj': 'vm-692481'}\n"]: nova.exception.InstanceNotFound: Instance a6920ad4-bf1c-4daa-9b9a-81e782c88a20 could not be found. [ 914.780415] env[68194]: DEBUG oslo_concurrency.lockutils [req-985ea9df-7e7f-4fa7-a923-cb7505135319 req-f8e4a02c-5b73-45c4-a053-319a7ac3a6f7 service nova] Releasing lock "a6920ad4-bf1c-4daa-9b9a-81e782c88a20" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 914.780978] env[68194]: DEBUG nova.compute.manager [req-985ea9df-7e7f-4fa7-a923-cb7505135319 req-f8e4a02c-5b73-45c4-a053-319a7ac3a6f7 service nova] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Detach interface failed, port_id=862157ec-2f0a-4023-b936-74591ce30a3c, reason: Instance a6920ad4-bf1c-4daa-9b9a-81e782c88a20 could not be found. 
{{(pid=68194) _process_instance_vif_deleted_event /opt/stack/nova/nova/compute/manager.py:10935}} [ 915.376425] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 915.376728] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 922.396845] env[68194]: DEBUG oslo_concurrency.lockutils [None req-263ea940-b565-4867-a31f-26ebfff91aa9 tempest-ServerAddressesTestJSON-524673289 tempest-ServerAddressesTestJSON-524673289-project-member] Acquiring lock "d42d0fa9-08f2-40d9-958f-775e55fb0ea1" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 922.397187] env[68194]: DEBUG oslo_concurrency.lockutils [None req-263ea940-b565-4867-a31f-26ebfff91aa9 tempest-ServerAddressesTestJSON-524673289 tempest-ServerAddressesTestJSON-524673289-project-member] Lock "d42d0fa9-08f2-40d9-958f-775e55fb0ea1" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 924.589591] env[68194]: DEBUG oslo_concurrency.lockutils [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 931.440538] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.416911] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.416911] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.416911] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 932.416911] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 934.416621] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.416910] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.417055] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 934.429779] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 934.429987] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 934.430146] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 934.430274] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 934.431376] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95846928-7142-4b97-905c-c36bc2353812 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.440262] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa9a051d-f2a3-4f0a-b715-9a5c4e14b673 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.454139] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c82ed69f-e3ff-478c-b309-772ae53f60f3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.460319] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4e6500a4-f618-46c6-8c41-4121a49d476f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 934.489080] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180969MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 934.489255] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 934.489458] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 934.582978] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.583175] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 10df4090-9ec0-4876-8925-23e585344a3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.583290] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4d692986-413f-4c9b-b5cc-de43d2ca498d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.583417] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.583538] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e575e1c7-7f35-41de-96e7-0771a4137bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.583664] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fe79ae03-c408-4d18-914e-e64065998663 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.583783] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.583897] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.584019] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.584204] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 934.597810] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.607946] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.617706] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1f155fbf-6460-4426-b6cf-176d44415eee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.626883] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8216e910-66b8-4147-a264-93e7eeefc7da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.636588] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.646296] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e9321912-2500-406b-b504-7668258a0c00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.656152] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8b885df7-e241-452a-bcaa-861b491a6ee0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.667208] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 337db74c-cea1-4760-a06e-c33cfb4d1de9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.677057] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fd693e84-5f26-4382-af13-d703dbbee894 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.686867] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e683863a-2b50-4681-a192-6955dc36562b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.697304] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f81311b2-917b-425b-8ad9-627f08548402 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.706863] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fb4dd17b-dc02-4086-b450-9449212ed7b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.716578] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 9d6026ba-70bf-4824-a23d-434d63e5bb85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.725794] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4c2b5eb3-9dcc-4499-9242-209289723719 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.735646] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 24c3932c-dced-4218-8a64-a3183ffc82f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.744646] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.753177] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d42d0fa9-08f2-40d9-958f-775e55fb0ea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 934.753410] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 934.753558] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 935.057806] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77b9d813-11ac-472c-bb5e-88dc7745568c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.065561] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff2f4440-ecce-4b44-a6d4-d0b4456f56f0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.095502] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecfd8ba3-25a0-4231-9511-2ea434a2c838 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.102999] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bcafd4c9-4e47-4e9e-a112-75363031b178 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 935.116757] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 935.131496] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 935.144650] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 935.144650] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.655s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 936.139928] env[68194]: DEBUG oslo_service.periodic_task [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.139928] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 936.139928] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 936.139928] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 936.160880] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.161071] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.161182] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.161312] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.161439] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.161562] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: fe79ae03-c408-4d18-914e-e64065998663] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.161684] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.161805] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.161926] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.162068] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 936.162200] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 936.162684] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 953.335047] env[68194]: WARNING oslo_vmware.rw_handles [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 953.335047] env[68194]: ERROR oslo_vmware.rw_handles [ 953.335692] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/4b5dc18e-65eb-4e35-9812-dcf255f79e5b/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 953.337535] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] 
Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 953.337789] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Copying Virtual Disk [datastore1] vmware_temp/4b5dc18e-65eb-4e35-9812-dcf255f79e5b/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/4b5dc18e-65eb-4e35-9812-dcf255f79e5b/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 953.338091] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-eb67d370-456c-4df9-98db-d784e422b1da {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.345635] env[68194]: DEBUG oslo_vmware.api [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Waiting for the task: (returnval){ [ 953.345635] env[68194]: value = "task-3466829" [ 953.345635] env[68194]: _type = "Task" [ 953.345635] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.353940] env[68194]: DEBUG oslo_vmware.api [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Task: {'id': task-3466829, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.856229] env[68194]: DEBUG oslo_vmware.exceptions [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 953.856516] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 953.857069] env[68194]: ERROR nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 953.857069] env[68194]: Faults: ['InvalidArgument'] [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Traceback (most recent call last): [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] yield resources [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] self.driver.spawn(context, instance, image_meta, [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] self._fetch_image_if_missing(context, vi) [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] image_cache(vi, tmp_image_ds_loc) [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] vm_util.copy_virtual_disk( [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] session._wait_for_task(vmdk_copy_task) [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, 
in _wait_for_task [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] return self.wait_for_task(task_ref) [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] return evt.wait() [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] result = hub.switch() [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] return self.greenlet.switch() [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] self.f(*self.args, **self.kw) [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] raise exceptions.translate_fault(task_info.error) [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Faults: ['InvalidArgument'] [ 953.857069] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] [ 953.858238] env[68194]: INFO nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Terminating instance [ 953.858945] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 953.859170] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 953.859416] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d6f90c1d-5813-4282-b012-c659b21fd731 {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.861771] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 953.861857] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 953.862598] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e593872-296f-4cca-8884-2a35a1db9cf4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.869805] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 953.870066] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-34d38955-1ec3-4afc-bf19-669a7a0434f7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.872405] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 953.872610] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 953.873603] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-aa22c237-5dda-4277-b824-33e4d19c8f75 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.878258] env[68194]: DEBUG oslo_vmware.api [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Waiting for the task: (returnval){ [ 953.878258] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]525fdc08-8b99-b7c0-7e71-b2c9ee63cea9" [ 953.878258] env[68194]: _type = "Task" [ 953.878258] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.885435] env[68194]: DEBUG oslo_vmware.api [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]525fdc08-8b99-b7c0-7e71-b2c9ee63cea9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 953.936800] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 953.937017] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 953.937207] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Deleting the datastore file [datastore1] 3d27a0be-599b-4bb4-89db-ff79d33047c8 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 953.937476] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-3014232a-7736-49f0-aac6-67dc2499d8b4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 953.943754] env[68194]: DEBUG oslo_vmware.api [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Waiting for the task: (returnval){ [ 953.943754] env[68194]: value = "task-3466831" [ 953.943754] env[68194]: _type = "Task" [ 953.943754] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 953.951102] env[68194]: DEBUG oslo_vmware.api [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Task: {'id': task-3466831, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 954.389100] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 954.389374] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Creating directory with path [datastore1] vmware_temp/c64c4cf4-a1f7-4c3f-ba5e-c5622ff41d98/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 954.389581] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c81dd16e-2544-4627-b500-6b3813d58702 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.402118] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Created directory with path [datastore1] vmware_temp/c64c4cf4-a1f7-4c3f-ba5e-c5622ff41d98/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 954.402332] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Fetch image to [datastore1] vmware_temp/c64c4cf4-a1f7-4c3f-ba5e-c5622ff41d98/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 954.402523] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/c64c4cf4-a1f7-4c3f-ba5e-c5622ff41d98/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 954.403302] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0db2c49d-c639-4088-9323-37b39e8af921 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.409893] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f02c5912-4906-47a8-afdd-51e689ad59c2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.419176] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2b37f91e-7412-4d45-8322-f4db8ecee42c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.451987] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-dbfc815f-8625-4ad5-af55-565127209da7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.459618] env[68194]: DEBUG oslo_vmware.api [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Task: {'id': task-3466831, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075503} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 954.460949] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 954.461143] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 954.461313] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 954.461484] env[68194]: INFO nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 954.463520] env[68194]: DEBUG nova.compute.claims [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 954.463683] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 954.463891] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 954.466376] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ac1caae0-295f-43fb-9c45-3ad7d1d55722 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.490676] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 954.543278] env[68194]: DEBUG oslo_vmware.rw_handles [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c64c4cf4-a1f7-4c3f-ba5e-c5622ff41d98/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 954.606021] env[68194]: DEBUG oslo_vmware.rw_handles [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 954.606021] env[68194]: DEBUG oslo_vmware.rw_handles [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/c64c4cf4-a1f7-4c3f-ba5e-c5622ff41d98/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 954.863502] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c20f1689-6698-4dd1-8b4f-03f1504439e6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.872010] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8b5d813d-ea30-485f-b9fc-8b7f9facbcf4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.903418] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16dc874e-fffe-444d-ba6f-786634114d5d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.911022] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c4063f4-832d-426d-9d15-d5fd10467618 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 954.924072] env[68194]: DEBUG nova.compute.provider_tree [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 954.932742] env[68194]: DEBUG nova.scheduler.client.report [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 954.946869] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.483s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 954.947515] env[68194]: ERROR nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.947515] env[68194]: Faults: ['InvalidArgument'] [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Traceback (most recent call last): [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 954.947515] env[68194]: ERROR 
nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] self.driver.spawn(context, instance, image_meta, [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] self._fetch_image_if_missing(context, vi) [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] image_cache(vi, tmp_image_ds_loc) [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] vm_util.copy_virtual_disk( [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] session._wait_for_task(vmdk_copy_task) [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] return self.wait_for_task(task_ref) [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] return evt.wait() [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] result = hub.switch() [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] return self.greenlet.switch() [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] self.f(*self.args, **self.kw) [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] raise exceptions.translate_fault(task_info.error) [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Faults: ['InvalidArgument'] [ 954.947515] env[68194]: ERROR nova.compute.manager [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] [ 954.948589] env[68194]: DEBUG nova.compute.utils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 954.949608] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Build of instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 was re-scheduled: A specified parameter was not correct: fileType [ 954.949608] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 954.949968] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 954.950152] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 954.950349] env[68194]: DEBUG nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 954.950520] env[68194]: DEBUG nova.network.neutron [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 955.278480] env[68194]: DEBUG nova.network.neutron [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.301962] env[68194]: INFO nova.compute.manager [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Took 0.35 seconds to deallocate network for instance. [ 955.403957] env[68194]: INFO nova.scheduler.client.report [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Deleted allocations for instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 [ 955.426442] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7281e784-e5cf-46cc-8e4b-2b2ff54e41b9 tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "3d27a0be-599b-4bb4-89db-ff79d33047c8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 332.781s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 955.428186] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "3d27a0be-599b-4bb4-89db-ff79d33047c8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 133.091s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 955.428440] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Acquiring lock "3d27a0be-599b-4bb4-89db-ff79d33047c8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 955.428654] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "3d27a0be-599b-4bb4-89db-ff79d33047c8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 
0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 955.428827] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "3d27a0be-599b-4bb4-89db-ff79d33047c8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 955.431071] env[68194]: INFO nova.compute.manager [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Terminating instance [ 955.434215] env[68194]: DEBUG nova.compute.manager [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 955.434479] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 955.435093] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea7300c6-82e4-400d-8f3b-121f4b8503a5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.437719] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 955.448384] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b5633d2-6639-47d6-a221-ad18daec50fa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.477639] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 could not be found. [ 955.477856] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 955.478052] env[68194]: INFO nova.compute.manager [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Took 0.04 seconds to destroy the instance on the hypervisor. 
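The traceback above shows the build of instance 3d27a0be-599b-4bb4-89db-ff79d33047c8 failing with oslo_vmware.exceptions.VimFaultException ("A specified parameter was not correct: fileType", Faults: ['InvalidArgument']) raised out of the task wait in session._wait_for_task while copying the cached disk. As a minimal, self-contained sketch of how a caller can inspect that fault class when waiting on a vCenter task (the function name and error handling below are illustrative, not Nova's actual code path):

    from oslo_vmware import exceptions as vexc

    def wait_for_copy(session, task_ref):
        # Illustrative only: wait on a vCenter task the same way the traceback
        # above does, and surface the vSphere fault names carried by the
        # exception (the log prints them as "Faults: ['InvalidArgument']").
        try:
            return session.wait_for_task(task_ref)
        except vexc.VimFaultException as err:
            if 'InvalidArgument' in err.fault_list:
                raise RuntimeError('disk copy rejected by vCenter: %s' % err)
            raise

Per the log, Nova's own reaction is to abort the claim, deallocate the network, and re-schedule the instance rather than fail the request outright.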
[ 955.478306] env[68194]: DEBUG oslo.service.loopingcall [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 955.479217] env[68194]: DEBUG nova.compute.manager [-] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 955.479327] env[68194]: DEBUG nova.network.neutron [-] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 955.494782] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 955.495033] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 955.496448] env[68194]: INFO nova.compute.claims [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 955.509883] env[68194]: DEBUG nova.network.neutron [-] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 955.525393] env[68194]: INFO nova.compute.manager [-] [instance: 3d27a0be-599b-4bb4-89db-ff79d33047c8] Took 0.05 seconds to deallocate network for instance. 
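The paired "Acquiring lock / acquired ... waited / released ... held" DEBUG lines that bracket instance_claim and abort_instance_claim in this log are emitted by oslo.concurrency's lockutils when a decorated method takes the named in-process lock. A minimal sketch of that pattern (the function and its arguments are illustrative, not the resource tracker's real signature):

    from oslo_concurrency import lockutils

    # Illustrative only: serialize resource-tracker style updates behind the
    # named "compute_resources" lock; lockutils itself logs the same
    # Acquiring/acquired/released lines, including the waited/held timings.
    @lockutils.synchronized('compute_resources')
    def claim_resources(tracker, instance):
        # ... update shared claim state here ...
        return True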
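The inventory reported for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 (above, and again just below) is unchanged between checks. Under the usual Placement capacity rule, usable capacity per resource class is (total - reserved) * allocation_ratio, with max_unit capping what a single allocation may request; a quick check of the figures in this log, as a sketch of the arithmetic rather than Placement's implementation:

    # Recompute schedulable capacity from the inventory shown in this log.
    inventory = {
        'VCPU':      {'total': 48,     'reserved': 0,   'allocation_ratio': 4.0, 'max_unit': 16},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0, 'max_unit': 65530},
        'DISK_GB':   {'total': 400,    'reserved': 0,   'allocation_ratio': 1.0, 'max_unit': 94},
    }
    for rc, inv in inventory.items():
        capacity = (inv['total'] - inv['reserved']) * inv['allocation_ratio']
        print(f"{rc}: capacity={capacity:.0f}, per-allocation cap={inv['max_unit']}")
    # VCPU: 192, MEMORY_MB: 196078, DISK_GB: 400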
[ 955.628382] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0af47153-e3c0-43d3-8eb5-1d8728a7b2fb tempest-ImagesNegativeTestJSON-1667231185 tempest-ImagesNegativeTestJSON-1667231185-project-member] Lock "3d27a0be-599b-4bb4-89db-ff79d33047c8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.200s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 955.882851] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-575fa94c-f31f-4664-9acd-13e25384765e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.890470] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f576c978-f866-47ed-bc47-f00ad97a9949 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.920612] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2eb0290-a9a8-45bb-9e53-4ddb77649e6b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.929759] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a36901b-9460-4ef8-bc48-70a0afeb7912 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 955.941512] env[68194]: DEBUG nova.compute.provider_tree [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 955.952491] env[68194]: DEBUG nova.scheduler.client.report [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 955.967959] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.473s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 955.968484] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 956.019835] env[68194]: DEBUG nova.compute.utils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 956.021371] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 956.021560] env[68194]: DEBUG nova.network.neutron [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 956.032824] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 956.041608] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquiring lock "ff16d7c1-a601-4ac6-be52-823727c8b843" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 956.103028] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 956.106849] env[68194]: DEBUG nova.policy [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c0f73c2608bb4cde8965d08111c0182a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b7d3b30153ba4f4d80444b94b680407f', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 956.126522] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 956.126522] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 956.126522] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 956.126688] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 956.126774] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 956.126943] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints 
/opt/stack/nova/nova/virt/hardware.py:430}} [ 956.127570] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 956.127570] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 956.127570] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 956.127798] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 956.127798] env[68194]: DEBUG nova.virt.hardware [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 956.128652] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f67b2f1f-85ac-4470-82ca-d168d2a3eb86 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.137150] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de8cebda-e411-4fa5-8d8e-1d9af24a2e74 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 956.462601] env[68194]: DEBUG nova.network.neutron [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Successfully created port: 84edd7ee-51a0-4fc3-ad04-c7c683d5b15f {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 957.226926] env[68194]: DEBUG nova.network.neutron [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Successfully updated port: 84edd7ee-51a0-4fc3-ad04-c7c683d5b15f {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 957.245572] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquiring lock "refresh_cache-ff16d7c1-a601-4ac6-be52-823727c8b843" {{(pid=68194) lock 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 957.245749] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquired lock "refresh_cache-ff16d7c1-a601-4ac6-be52-823727c8b843" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 957.245909] env[68194]: DEBUG nova.network.neutron [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 957.288359] env[68194]: DEBUG nova.network.neutron [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 957.458318] env[68194]: DEBUG nova.network.neutron [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Updating instance_info_cache with network_info: [{"id": "84edd7ee-51a0-4fc3-ad04-c7c683d5b15f", "address": "fa:16:3e:71:76:7f", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84edd7ee-51", "ovs_interfaceid": "84edd7ee-51a0-4fc3-ad04-c7c683d5b15f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 957.469480] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Releasing lock "refresh_cache-ff16d7c1-a601-4ac6-be52-823727c8b843" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 957.469790] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Instance network_info: |[{"id": "84edd7ee-51a0-4fc3-ad04-c7c683d5b15f", "address": "fa:16:3e:71:76:7f", 
"network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84edd7ee-51", "ovs_interfaceid": "84edd7ee-51a0-4fc3-ad04-c7c683d5b15f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 957.470198] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:71:76:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '84edd7ee-51a0-4fc3-ad04-c7c683d5b15f', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 957.477869] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Creating folder: Project (b7d3b30153ba4f4d80444b94b680407f). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 957.478470] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-046ec317-0500-4e46-8a63-c5d9cc76621f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.490095] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Created folder: Project (b7d3b30153ba4f4d80444b94b680407f) in parent group-v692426. [ 957.490317] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Creating folder: Instances. Parent ref: group-v692485. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 957.490593] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7685a11e-a44c-43f3-8829-7d27cd08bb80 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.500255] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Created folder: Instances in parent group-v692485. [ 957.500546] env[68194]: DEBUG oslo.service.loopingcall [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 957.500780] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 957.501043] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-c20a0d39-544b-4510-a820-2e615fd18096 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 957.536446] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 957.536446] env[68194]: value = "task-3466834" [ 957.536446] env[68194]: _type = "Task" [ 957.536446] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 957.545095] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466834, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 957.834945] env[68194]: DEBUG nova.compute.manager [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Received event network-vif-plugged-84edd7ee-51a0-4fc3-ad04-c7c683d5b15f {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 957.835218] env[68194]: DEBUG oslo_concurrency.lockutils [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] Acquiring lock "ff16d7c1-a601-4ac6-be52-823727c8b843-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 957.835514] env[68194]: DEBUG oslo_concurrency.lockutils [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 957.835704] env[68194]: DEBUG oslo_concurrency.lockutils [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 957.835870] env[68194]: DEBUG nova.compute.manager [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] No waiting events found dispatching network-vif-plugged-84edd7ee-51a0-4fc3-ad04-c7c683d5b15f {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 957.836303] env[68194]: WARNING nova.compute.manager [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Received unexpected event network-vif-plugged-84edd7ee-51a0-4fc3-ad04-c7c683d5b15f for instance with vm_state building and task_state spawning. [ 957.836523] env[68194]: DEBUG nova.compute.manager [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Received event network-changed-84edd7ee-51a0-4fc3-ad04-c7c683d5b15f {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 957.836836] env[68194]: DEBUG nova.compute.manager [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Refreshing instance network info cache due to event network-changed-84edd7ee-51a0-4fc3-ad04-c7c683d5b15f. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 957.837134] env[68194]: DEBUG oslo_concurrency.lockutils [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] Acquiring lock "refresh_cache-ff16d7c1-a601-4ac6-be52-823727c8b843" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 957.837464] env[68194]: DEBUG oslo_concurrency.lockutils [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] Acquired lock "refresh_cache-ff16d7c1-a601-4ac6-be52-823727c8b843" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 957.837879] env[68194]: DEBUG nova.network.neutron [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Refreshing network info cache for port 84edd7ee-51a0-4fc3-ad04-c7c683d5b15f {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 958.047146] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466834, 'name': CreateVM_Task, 'duration_secs': 0.267656} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 958.047370] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 958.048060] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 958.048274] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 958.048625] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 958.048914] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-860463ea-e78a-4209-9c62-d74fef26cde6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 958.053472] env[68194]: DEBUG oslo_vmware.api [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Waiting for the task: (returnval){ [ 958.053472] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52cbfcfa-69e9-b299-0cfb-4b5665d92289" [ 958.053472] env[68194]: _type = "Task" [ 958.053472] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 958.061390] env[68194]: DEBUG oslo_vmware.api [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52cbfcfa-69e9-b299-0cfb-4b5665d92289, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 958.304262] env[68194]: DEBUG nova.network.neutron [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Updated VIF entry in instance network info cache for port 84edd7ee-51a0-4fc3-ad04-c7c683d5b15f. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 958.304262] env[68194]: DEBUG nova.network.neutron [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Updating instance_info_cache with network_info: [{"id": "84edd7ee-51a0-4fc3-ad04-c7c683d5b15f", "address": "fa:16:3e:71:76:7f", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84edd7ee-51", "ovs_interfaceid": "84edd7ee-51a0-4fc3-ad04-c7c683d5b15f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 958.318173] env[68194]: DEBUG oslo_concurrency.lockutils [req-a9014b0a-149a-4610-85e8-4871c025b6f2 req-f433bd34-33b9-41f1-808e-7cdcf403c55b service nova] Releasing lock "refresh_cache-ff16d7c1-a601-4ac6-be52-823727c8b843" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 958.564945] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 958.565358] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 958.565594] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 959.973338] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 959.973692] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 992.416881] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.417160] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 992.417410] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 993.417307] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 994.416580] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 994.416874] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 994.429565] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 994.429846] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 994.429963] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 994.430144] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 994.431299] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04acfbf6-7d51-423c-add6-7c24e8cd7bdf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.440387] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-232e3939-24e0-46a0-a739-e1652457b83d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.454209] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-366f1222-2d76-427c-a13a-1bc1e75fdd7c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.460441] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1fe71e0f-e23c-4245-bd0c-a5feb98fe835 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 994.490646] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] 
Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180962MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 994.490809] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 994.491023] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 994.575375] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 10df4090-9ec0-4876-8925-23e585344a3b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.575641] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4d692986-413f-4c9b-b5cc-de43d2ca498d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.575779] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.575902] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e575e1c7-7f35-41de-96e7-0771a4137bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.576075] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fe79ae03-c408-4d18-914e-e64065998663 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.576153] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.576269] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.576385] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.576573] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.576701] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 994.589285] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.600718] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1f155fbf-6460-4426-b6cf-176d44415eee has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.611326] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8216e910-66b8-4147-a264-93e7eeefc7da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.622809] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.632806] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e9321912-2500-406b-b504-7668258a0c00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.661162] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8b885df7-e241-452a-bcaa-861b491a6ee0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.671475] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 337db74c-cea1-4760-a06e-c33cfb4d1de9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.681266] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fd693e84-5f26-4382-af13-d703dbbee894 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.692146] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e683863a-2b50-4681-a192-6955dc36562b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.701240] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f81311b2-917b-425b-8ad9-627f08548402 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.710257] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fb4dd17b-dc02-4086-b450-9449212ed7b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. 
Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.719421] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 9d6026ba-70bf-4824-a23d-434d63e5bb85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.729484] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4c2b5eb3-9dcc-4499-9242-209289723719 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.740457] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 24c3932c-dced-4218-8a64-a3183ffc82f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.756175] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.765933] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d42d0fa9-08f2-40d9-958f-775e55fb0ea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.777774] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 994.777774] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 994.777774] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 995.067366] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5cb64121-438e-4f9a-9a7a-5fb5c09d3b5d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.074563] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fd41cd57-6ad1-4f93-a1ca-a266f14fea18 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.104151] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93c66202-a10c-476e-a6fe-1f13b2a4be06 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.111231] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-62ecc365-a329-4af5-bd5e-4ea7d900fe55 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 995.124095] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 995.133813] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 995.147579] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 995.147643] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.657s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 996.146735] env[68194]: DEBUG oslo_service.periodic_task [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 996.417097] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 996.417097] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 996.417097] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 996.437885] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.438066] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.438184] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.438308] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.438431] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: fe79ae03-c408-4d18-914e-e64065998663] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.438551] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.438675] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.438827] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.438978] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.439110] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 996.439244] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 996.439904] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 997.434328] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1003.318741] env[68194]: WARNING oslo_vmware.rw_handles [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1003.318741] env[68194]: ERROR oslo_vmware.rw_handles [ 1003.319354] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/c64c4cf4-a1f7-4c3f-ba5e-c5622ff41d98/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1003.321060] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1003.321326] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Copying Virtual Disk [datastore1] vmware_temp/c64c4cf4-a1f7-4c3f-ba5e-c5622ff41d98/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/c64c4cf4-a1f7-4c3f-ba5e-c5622ff41d98/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1003.321607] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-54552104-4968-4804-84d9-1132e6328aef {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.330009] env[68194]: DEBUG oslo_vmware.api [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Waiting for the task: (returnval){ [ 1003.330009] env[68194]: value = "task-3466835" [ 1003.330009] env[68194]: _type = "Task" [ 1003.330009] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.337970] env[68194]: DEBUG oslo_vmware.api [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Task: {'id': task-3466835, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.840580] env[68194]: DEBUG oslo_vmware.exceptions [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1003.840896] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1003.841481] env[68194]: ERROR nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1003.841481] env[68194]: Faults: ['InvalidArgument'] [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Traceback (most recent call last): [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] yield resources [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] self.driver.spawn(context, instance, image_meta, [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] self._fetch_image_if_missing(context, vi) [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] image_cache(vi, tmp_image_ds_loc) [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] vm_util.copy_virtual_disk( [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] session._wait_for_task(vmdk_copy_task) [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] return self.wait_for_task(task_ref) [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] return evt.wait() [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] result = hub.switch() [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] return self.greenlet.switch() [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] self.f(*self.args, **self.kw) [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] raise exceptions.translate_fault(task_info.error) [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Faults: ['InvalidArgument'] [ 1003.841481] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] [ 1003.842299] env[68194]: INFO nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Terminating instance [ 1003.843419] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1003.843652] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1003.843938] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
FileManager.MakeDirectory with opID=oslo.vmware-b47fd757-6384-461e-9f4b-5daf4f67e2a5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.846253] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1003.846451] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1003.847247] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-74351b63-c2be-44f0-abcc-72b3cee027c1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.854135] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1003.854403] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1bf89ec5-a220-4af1-acc6-6f85bf8e417b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.856843] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1003.857135] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1003.857681] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cce6435b-d50b-4683-8c8b-827f0dbe69fa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.862336] env[68194]: DEBUG oslo_vmware.api [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Waiting for the task: (returnval){ [ 1003.862336] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52b8435f-481e-5225-115f-a1320acd0f68" [ 1003.862336] env[68194]: _type = "Task" [ 1003.862336] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.869523] env[68194]: DEBUG oslo_vmware.api [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52b8435f-481e-5225-115f-a1320acd0f68, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1003.922312] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1003.922603] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1003.922790] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Deleting the datastore file [datastore1] 10df4090-9ec0-4876-8925-23e585344a3b {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1003.923074] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-67a9b9c5-6160-40aa-8ce9-d4bb025118c5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1003.929490] env[68194]: DEBUG oslo_vmware.api [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Waiting for the task: (returnval){ [ 1003.929490] env[68194]: value = "task-3466837" [ 1003.929490] env[68194]: _type = "Task" [ 1003.929490] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1003.937449] env[68194]: DEBUG oslo_vmware.api [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Task: {'id': task-3466837, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1004.372333] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1004.372606] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Creating directory with path [datastore1] vmware_temp/d4cae5c0-71ce-4423-a6d3-6c2989fc89f8/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1004.372810] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-009506fb-edb6-46a3-aafc-4f1012d9783d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.384846] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Created directory with path [datastore1] vmware_temp/d4cae5c0-71ce-4423-a6d3-6c2989fc89f8/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1004.385047] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Fetch image to [datastore1] vmware_temp/d4cae5c0-71ce-4423-a6d3-6c2989fc89f8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1004.385223] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/d4cae5c0-71ce-4423-a6d3-6c2989fc89f8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1004.385972] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7332520e-3ce9-4c30-a435-1c2b40b4b448 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.392007] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9ba5e328-e91a-4119-b215-29648dbb8ce5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.400707] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d734e1de-2e8c-422c-b190-1c1c7632654f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 
1004.430245] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98bf3741-c419-4f9f-b15e-ac1ea12417a4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.440886] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a33f6a5e-521a-4a3d-961f-af375c757590 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.442503] env[68194]: DEBUG oslo_vmware.api [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Task: {'id': task-3466837, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.082447} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1004.442741] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1004.442919] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1004.443105] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1004.443281] env[68194]: INFO nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1004.445281] env[68194]: DEBUG nova.compute.claims [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1004.445449] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1004.445658] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1004.463246] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1004.519941] env[68194]: DEBUG oslo_vmware.rw_handles [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d4cae5c0-71ce-4423-a6d3-6c2989fc89f8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1004.579115] env[68194]: DEBUG oslo_vmware.rw_handles [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1004.579296] env[68194]: DEBUG oslo_vmware.rw_handles [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d4cae5c0-71ce-4423-a6d3-6c2989fc89f8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1004.833733] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77febc95-14d7-46aa-8712-b0ea8cd507bd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.841537] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e81224-a900-4823-b0f3-27871d033527 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.871750] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eca89e23-2d87-4202-ae0d-adb282191506 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.879846] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e59f64f0-a69f-4bf9-a981-28c5a6ace828 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1004.892812] env[68194]: DEBUG nova.compute.provider_tree [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1004.901900] env[68194]: DEBUG nova.scheduler.client.report [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1004.915391] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.470s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1004.915987] env[68194]: ERROR nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1004.915987] env[68194]: Faults: ['InvalidArgument'] [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Traceback (most recent call last): [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1004.915987] 
env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] self.driver.spawn(context, instance, image_meta, [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] self._fetch_image_if_missing(context, vi) [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] image_cache(vi, tmp_image_ds_loc) [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] vm_util.copy_virtual_disk( [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] session._wait_for_task(vmdk_copy_task) [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] return self.wait_for_task(task_ref) [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] return evt.wait() [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] result = hub.switch() [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] return self.greenlet.switch() [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] self.f(*self.args, **self.kw) [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] raise exceptions.translate_fault(task_info.error) [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Faults: ['InvalidArgument'] [ 1004.915987] env[68194]: ERROR nova.compute.manager [instance: 10df4090-9ec0-4876-8925-23e585344a3b] [ 1004.917774] env[68194]: DEBUG nova.compute.utils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1004.918169] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Build of instance 10df4090-9ec0-4876-8925-23e585344a3b was re-scheduled: A specified parameter was not correct: fileType [ 1004.918169] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1004.918541] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1004.918716] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1004.918884] env[68194]: DEBUG nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1004.919059] env[68194]: DEBUG nova.network.neutron [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1005.248271] env[68194]: DEBUG nova.network.neutron [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.262317] env[68194]: INFO nova.compute.manager [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Took 0.34 seconds to deallocate network for instance. [ 1005.359544] env[68194]: INFO nova.scheduler.client.report [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Deleted allocations for instance 10df4090-9ec0-4876-8925-23e585344a3b [ 1005.381319] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6836f1ca-f773-490f-a398-dab1964b5c9f tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "10df4090-9ec0-4876-8925-23e585344a3b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 382.737s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1005.382357] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "10df4090-9ec0-4876-8925-23e585344a3b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 183.906s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1005.382582] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Acquiring lock "10df4090-9ec0-4876-8925-23e585344a3b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1005.382790] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "10df4090-9ec0-4876-8925-23e585344a3b-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1005.382961] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "10df4090-9ec0-4876-8925-23e585344a3b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1005.386090] env[68194]: INFO nova.compute.manager [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Terminating instance [ 1005.387258] env[68194]: DEBUG nova.compute.manager [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1005.387462] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1005.391718] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-93e3bd54-92a6-4363-bd0b-d6b6f1a28150 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.400173] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9820b7b-6a74-43f4-80a4-f76385a80062 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.412674] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1005.432936] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 10df4090-9ec0-4876-8925-23e585344a3b could not be found. 
[ 1005.433132] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1005.433384] env[68194]: INFO nova.compute.manager [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1005.433665] env[68194]: DEBUG oslo.service.loopingcall [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1005.433956] env[68194]: DEBUG nova.compute.manager [-] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1005.435193] env[68194]: DEBUG nova.network.neutron [-] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1005.458920] env[68194]: DEBUG nova.network.neutron [-] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1005.466322] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1005.466569] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1005.469299] env[68194]: INFO nova.compute.claims [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1005.472112] env[68194]: INFO nova.compute.manager [-] [instance: 10df4090-9ec0-4876-8925-23e585344a3b] Took 0.04 seconds to deallocate network for instance. 
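Annotation: the "Waiting for function … _deallocate_network_with_retries to return" record above is oslo.service's looping-call machinery driving a retried network deallocation. A rough sketch of that retry idea follows; the retry parameters and the exception type are illustrative assumptions, not Nova's exact settings.

    # Rough sketch of a retry wrapper in the style of the looping-call record
    # above (oslo_service.loopingcall). Parameter values and the exception
    # class are assumptions for illustration only.
    from oslo_service import loopingcall


    class TransientNeutronError(Exception):
        """Stand-in for the transient errors such a wrapper retries on."""


    @loopingcall.RetryDecorator(max_retry_count=3, inc_sleep_time=2,
                                max_sleep_time=12,
                                exceptions=(TransientNeutronError,))
    def deallocate_network_with_retries():
        # Each failed attempt sleeps a little longer before the next try;
        # once max_retry_count is exceeded the last exception is re-raised.
        pass


    deallocate_network_with_retries()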
[ 1005.564384] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d8e73d24-e03c-4c6e-ab77-bd8d8602c0b4 tempest-ImagesOneServerTestJSON-1548365022 tempest-ImagesOneServerTestJSON-1548365022-project-member] Lock "10df4090-9ec0-4876-8925-23e585344a3b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1005.807646] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c587b0a-9373-4e0c-9ba3-7ae0b20fd5f3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.815319] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1050f7d6-a25e-4104-850c-18a5706be15f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.844452] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8a2bf15-a170-410c-a675-509a9dec3112 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.851613] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5c0e391-415c-470b-b968-0a9d8632db28 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1005.865264] env[68194]: DEBUG nova.compute.provider_tree [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1005.877225] env[68194]: DEBUG nova.scheduler.client.report [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1005.893361] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.427s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1005.893980] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1005.929258] env[68194]: DEBUG nova.compute.utils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1005.930762] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1005.930762] env[68194]: DEBUG nova.network.neutron [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1005.940610] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1006.007338] env[68194]: DEBUG nova.policy [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '59d7f876ff3f49efb129c9008019e6ad', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '517c4285db484365ba03370a938aa545', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1006.010273] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1006.036260] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1006.036511] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1006.036668] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1006.036940] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1006.037202] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1006.037754] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1006.037754] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1006.037754] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1006.037887] env[68194]: DEBUG nova.virt.hardware [None 
req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1006.038072] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1006.038284] env[68194]: DEBUG nova.virt.hardware [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1006.039102] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8513c11e-ae56-4463-ade1-fbc60f2d3cef {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.047868] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-888a0280-c3eb-4704-b623-b54b94048909 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1006.695663] env[68194]: DEBUG nova.network.neutron [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Successfully created port: df22c8df-71be-44a0-8bd1-58b61789f86d {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1007.532981] env[68194]: DEBUG nova.network.neutron [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Successfully updated port: df22c8df-71be-44a0-8bd1-58b61789f86d {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1007.546551] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquiring lock "refresh_cache-20f4ed05-ee86-416b-8bf7-d446d33bab6f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1007.546683] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquired lock "refresh_cache-20f4ed05-ee86-416b-8bf7-d446d33bab6f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1007.546915] env[68194]: DEBUG nova.network.neutron [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1007.591384] env[68194]: DEBUG nova.network.neutron [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 
tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1007.687029] env[68194]: DEBUG nova.compute.manager [req-dada2905-11c2-4f4b-8d60-a476967054dd req-1ed52fcc-a056-4aad-a895-95fab94cffc4 service nova] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Received event network-vif-plugged-df22c8df-71be-44a0-8bd1-58b61789f86d {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1007.688022] env[68194]: DEBUG oslo_concurrency.lockutils [req-dada2905-11c2-4f4b-8d60-a476967054dd req-1ed52fcc-a056-4aad-a895-95fab94cffc4 service nova] Acquiring lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1007.688022] env[68194]: DEBUG oslo_concurrency.lockutils [req-dada2905-11c2-4f4b-8d60-a476967054dd req-1ed52fcc-a056-4aad-a895-95fab94cffc4 service nova] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1007.688022] env[68194]: DEBUG oslo_concurrency.lockutils [req-dada2905-11c2-4f4b-8d60-a476967054dd req-1ed52fcc-a056-4aad-a895-95fab94cffc4 service nova] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1007.688022] env[68194]: DEBUG nova.compute.manager [req-dada2905-11c2-4f4b-8d60-a476967054dd req-1ed52fcc-a056-4aad-a895-95fab94cffc4 service nova] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] No waiting events found dispatching network-vif-plugged-df22c8df-71be-44a0-8bd1-58b61789f86d {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1007.688022] env[68194]: WARNING nova.compute.manager [req-dada2905-11c2-4f4b-8d60-a476967054dd req-1ed52fcc-a056-4aad-a895-95fab94cffc4 service nova] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Received unexpected event network-vif-plugged-df22c8df-71be-44a0-8bd1-58b61789f86d for instance with vm_state building and task_state spawning. 
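Annotation: the record that follows dumps the instance_info_cache entry for port df22c8df-71be-44a0-8bd1-58b61789f86d. The sketch below shows one way to pull the commonly needed fields (MAC, fixed IPs, device name) out of such a VIF dict; the literal is a trimmed copy of the logged data, and the helper function is an illustrative assumption, not a Nova API.

    # Sketch: reading fields from the network_info VIF entry logged below.
    # The dict is abbreviated from the logged data; fixed_ips() is illustrative.
    vif = {
        "id": "df22c8df-71be-44a0-8bd1-58b61789f86d",
        "address": "fa:16:3e:35:7b:1f",
        "devname": "tapdf22c8df-71",
        "ovs_interfaceid": "df22c8df-71be-44a0-8bd1-58b61789f86d",
        "network": {
            "id": "d6684a7f-b5c2-4dcd-8a4e-38a28cd5c0e2",
            "bridge": "br-int",
            "subnets": [{
                "cidr": "192.168.128.0/28",
                "gateway": {"address": "192.168.128.1", "version": 4},
                "ips": [{"address": "192.168.128.12", "type": "fixed",
                         "version": 4}],
            }],
        },
    }

    def fixed_ips(vif):
        """Yield every fixed IP address across the VIF's subnets."""
        for subnet in vif["network"]["subnets"]:
            for ip in subnet["ips"]:
                if ip.get("type") == "fixed":
                    yield ip["address"]

    print(vif["address"], list(fixed_ips(vif)), vif["devname"])
    # fa:16:3e:35:7b:1f ['192.168.128.12'] tapdf22c8df-71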
[ 1007.769294] env[68194]: DEBUG nova.network.neutron [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Updating instance_info_cache with network_info: [{"id": "df22c8df-71be-44a0-8bd1-58b61789f86d", "address": "fa:16:3e:35:7b:1f", "network": {"id": "d6684a7f-b5c2-4dcd-8a4e-38a28cd5c0e2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-54528567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "517c4285db484365ba03370a938aa545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30f1dacf-8988-41b8-aa8f-e9530f65ef46", "external-id": "nsx-vlan-transportzone-561", "segmentation_id": 561, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf22c8df-71", "ovs_interfaceid": "df22c8df-71be-44a0-8bd1-58b61789f86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1007.784690] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Releasing lock "refresh_cache-20f4ed05-ee86-416b-8bf7-d446d33bab6f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1007.785162] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Instance network_info: |[{"id": "df22c8df-71be-44a0-8bd1-58b61789f86d", "address": "fa:16:3e:35:7b:1f", "network": {"id": "d6684a7f-b5c2-4dcd-8a4e-38a28cd5c0e2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-54528567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "517c4285db484365ba03370a938aa545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30f1dacf-8988-41b8-aa8f-e9530f65ef46", "external-id": "nsx-vlan-transportzone-561", "segmentation_id": 561, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf22c8df-71", "ovs_interfaceid": "df22c8df-71be-44a0-8bd1-58b61789f86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1007.785406] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None 
req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:35:7b:1f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '30f1dacf-8988-41b8-aa8f-e9530f65ef46', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'df22c8df-71be-44a0-8bd1-58b61789f86d', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1007.794180] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Creating folder: Project (517c4285db484365ba03370a938aa545). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1007.794736] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-0d27a614-9d94-483e-a051-d6b7106a5bd0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.805176] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Created folder: Project (517c4285db484365ba03370a938aa545) in parent group-v692426. [ 1007.805359] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Creating folder: Instances. Parent ref: group-v692488. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1007.805578] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-e11341f7-9bbc-4c3b-a539-8f0ba6c92b7a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.815382] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Created folder: Instances in parent group-v692488. [ 1007.815842] env[68194]: DEBUG oslo.service.loopingcall [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1007.816059] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1007.816262] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0d20935a-fef7-40f2-aa1a-0bdf1267ad96 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1007.835156] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1007.835156] env[68194]: value = "task-3466840" [ 1007.835156] env[68194]: _type = "Task" [ 1007.835156] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1007.842302] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466840, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.345780] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466840, 'name': CreateVM_Task, 'duration_secs': 0.300519} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1008.345966] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1008.348312] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1008.348480] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1008.348831] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1008.349135] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-94d3cbd3-fda6-4d9d-9f65-af3de67d88a3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1008.353724] env[68194]: DEBUG oslo_vmware.api [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Waiting for the task: (returnval){ [ 1008.353724] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5272a26d-ac33-3f91-0fe8-8169d5a1030b" [ 1008.353724] env[68194]: _type = "Task" [ 1008.353724] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1008.361295] env[68194]: DEBUG oslo_vmware.api [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5272a26d-ac33-3f91-0fe8-8169d5a1030b, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1008.866676] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1008.867059] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1008.867180] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1009.829994] env[68194]: DEBUG nova.compute.manager [req-502f0915-af6d-49bd-a0de-741066c59c96 req-fe6c90a0-16d4-43f1-97a0-30425c49312f service nova] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Received event network-changed-df22c8df-71be-44a0-8bd1-58b61789f86d {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1009.830290] env[68194]: DEBUG nova.compute.manager [req-502f0915-af6d-49bd-a0de-741066c59c96 req-fe6c90a0-16d4-43f1-97a0-30425c49312f service nova] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Refreshing instance network info cache due to event network-changed-df22c8df-71be-44a0-8bd1-58b61789f86d. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1009.830553] env[68194]: DEBUG oslo_concurrency.lockutils [req-502f0915-af6d-49bd-a0de-741066c59c96 req-fe6c90a0-16d4-43f1-97a0-30425c49312f service nova] Acquiring lock "refresh_cache-20f4ed05-ee86-416b-8bf7-d446d33bab6f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1009.830998] env[68194]: DEBUG oslo_concurrency.lockutils [req-502f0915-af6d-49bd-a0de-741066c59c96 req-fe6c90a0-16d4-43f1-97a0-30425c49312f service nova] Acquired lock "refresh_cache-20f4ed05-ee86-416b-8bf7-d446d33bab6f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1009.831242] env[68194]: DEBUG nova.network.neutron [req-502f0915-af6d-49bd-a0de-741066c59c96 req-fe6c90a0-16d4-43f1-97a0-30425c49312f service nova] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Refreshing network info cache for port df22c8df-71be-44a0-8bd1-58b61789f86d {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1010.402738] env[68194]: DEBUG nova.network.neutron [req-502f0915-af6d-49bd-a0de-741066c59c96 req-fe6c90a0-16d4-43f1-97a0-30425c49312f service nova] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Updated VIF entry in instance network info cache for port df22c8df-71be-44a0-8bd1-58b61789f86d. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1010.402738] env[68194]: DEBUG nova.network.neutron [req-502f0915-af6d-49bd-a0de-741066c59c96 req-fe6c90a0-16d4-43f1-97a0-30425c49312f service nova] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Updating instance_info_cache with network_info: [{"id": "df22c8df-71be-44a0-8bd1-58b61789f86d", "address": "fa:16:3e:35:7b:1f", "network": {"id": "d6684a7f-b5c2-4dcd-8a4e-38a28cd5c0e2", "bridge": "br-int", "label": "tempest-ServersAdminTestJSON-54528567-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "517c4285db484365ba03370a938aa545", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "30f1dacf-8988-41b8-aa8f-e9530f65ef46", "external-id": "nsx-vlan-transportzone-561", "segmentation_id": 561, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapdf22c8df-71", "ovs_interfaceid": "df22c8df-71be-44a0-8bd1-58b61789f86d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1010.414114] env[68194]: DEBUG oslo_concurrency.lockutils [req-502f0915-af6d-49bd-a0de-741066c59c96 req-fe6c90a0-16d4-43f1-97a0-30425c49312f service nova] Releasing lock "refresh_cache-20f4ed05-ee86-416b-8bf7-d446d33bab6f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1014.118684] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquiring lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1022.423055] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "bcb53c97-8d95-4d67-b310-d19087b0b298" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1022.423404] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1023.234701] env[68194]: DEBUG oslo_concurrency.lockutils [None req-fc0cbb24-0a7d-4392-b4ed-d6dbdb96d654 tempest-ListServerFiltersTestJSON-1619794283 
tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "f141f989-f0c8-4943-982d-adf499342ec3" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1023.234945] env[68194]: DEBUG oslo_concurrency.lockutils [None req-fc0cbb24-0a7d-4392-b4ed-d6dbdb96d654 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "f141f989-f0c8-4943-982d-adf499342ec3" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1023.807843] env[68194]: DEBUG oslo_concurrency.lockutils [None req-17c84160-d67c-46a1-a481-3a64e5662361 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "811aeb6b-eb94-4618-a0da-f391184cbd70" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1023.808136] env[68194]: DEBUG oslo_concurrency.lockutils [None req-17c84160-d67c-46a1-a481-3a64e5662361 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "811aeb6b-eb94-4618-a0da-f391184cbd70" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1025.545024] env[68194]: DEBUG oslo_concurrency.lockutils [None req-84c32697-447a-474a-bf61-24d4205a6db5 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquiring lock "554a4ee3-092b-443a-99fc-63d9d753c8ec" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1025.545024] env[68194]: DEBUG oslo_concurrency.lockutils [None req-84c32697-447a-474a-bf61-24d4205a6db5 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "554a4ee3-092b-443a-99fc-63d9d753c8ec" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1026.519260] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] Acquiring lock "b5596341-1994-4187-a240-7e02d1534ea0" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1026.519795] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] Lock "b5596341-1994-4187-a240-7e02d1534ea0" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: 
waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1026.545993] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] Acquiring lock "60be602e-d9e3-4f0f-972e-e19acbb3813e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1026.545993] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] Lock "60be602e-d9e3-4f0f-972e-e19acbb3813e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1026.572393] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] Acquiring lock "ef28d606-6b14-4fd1-9768-4dfc90b06fd5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1026.572898] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] Lock "ef28d606-6b14-4fd1-9768-4dfc90b06fd5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.003s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1038.054935] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3f8e19db-4904-4dd5-829b-b620b1d6ca05 tempest-ServerMetadataNegativeTestJSON-1420612404 tempest-ServerMetadataNegativeTestJSON-1420612404-project-member] Acquiring lock "ec03aeaf-11e6-456f-b408-77557f77645b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1038.055209] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3f8e19db-4904-4dd5-829b-b620b1d6ca05 tempest-ServerMetadataNegativeTestJSON-1420612404 tempest-ServerMetadataNegativeTestJSON-1420612404-project-member] Lock "ec03aeaf-11e6-456f-b408-77557f77645b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1047.915960] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0292630c-1f92-4ba3-8569-fe1c8bf5d3b5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "1ef2d43f-9e6f-4354-91c5-9e1155c2a382" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1047.916293] env[68194]: DEBUG 
oslo_concurrency.lockutils [None req-0292630c-1f92-4ba3-8569-fe1c8bf5d3b5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "1ef2d43f-9e6f-4354-91c5-9e1155c2a382" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1051.410931] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1052.671934] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f344be3f-461d-4cdc-8d9a-92b7cfd668d0 tempest-ServersNegativeTestMultiTenantJSON-951148991 tempest-ServersNegativeTestMultiTenantJSON-951148991-project-member] Acquiring lock "6ee32532-e88c-4eb6-9e3f-c1ea42a4d560" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1052.672193] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f344be3f-461d-4cdc-8d9a-92b7cfd668d0 tempest-ServersNegativeTestMultiTenantJSON-951148991 tempest-ServersNegativeTestMultiTenantJSON-951148991-project-member] Lock "6ee32532-e88c-4eb6-9e3f-c1ea42a4d560" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1053.339825] env[68194]: WARNING oslo_vmware.rw_handles [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1053.339825] env[68194]: ERROR oslo_vmware.rw_handles [ 1053.340288] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 
4d692986-413f-4c9b-b5cc-de43d2ca498d] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/d4cae5c0-71ce-4423-a6d3-6c2989fc89f8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1053.342478] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1053.342760] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Copying Virtual Disk [datastore1] vmware_temp/d4cae5c0-71ce-4423-a6d3-6c2989fc89f8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/d4cae5c0-71ce-4423-a6d3-6c2989fc89f8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1053.343073] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0719f12c-8ff0-4628-be29-9ccd4e6ded4f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.354980] env[68194]: DEBUG oslo_vmware.api [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Waiting for the task: (returnval){ [ 1053.354980] env[68194]: value = "task-3466841" [ 1053.354980] env[68194]: _type = "Task" [ 1053.354980] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.366508] env[68194]: DEBUG oslo_vmware.api [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Task: {'id': task-3466841, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.416146] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.416395] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1053.416551] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1053.866654] env[68194]: DEBUG oslo_vmware.exceptions [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1053.866654] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1053.867036] env[68194]: ERROR nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1053.867036] env[68194]: Faults: ['InvalidArgument'] [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Traceback (most recent call last): [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] yield resources [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] self.driver.spawn(context, instance, image_meta, [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] self._fetch_image_if_missing(context, vi) [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] image_cache(vi, tmp_image_ds_loc) [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] vm_util.copy_virtual_disk( [ 1053.867036] env[68194]: ERROR nova.compute.manager 
[instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] session._wait_for_task(vmdk_copy_task) [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] return self.wait_for_task(task_ref) [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] return evt.wait() [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] result = hub.switch() [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] return self.greenlet.switch() [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] self.f(*self.args, **self.kw) [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] raise exceptions.translate_fault(task_info.error) [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Faults: ['InvalidArgument'] [ 1053.867036] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] [ 1053.867809] env[68194]: INFO nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Terminating instance [ 1053.868849] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1053.869090] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None 
req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1053.870029] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1053.870029] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1053.870212] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-12219f53-fd24-4389-af53-23f8695378c6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.872419] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1b74e3a-01ca-4e1f-80a7-cc565b903f56 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.879223] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1053.879430] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-7f993ddf-852a-4ac4-839f-0518d445958a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.881906] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1053.882104] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1053.884028] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-1f0d5d07-aead-4a44-bfc2-8424f8a38130 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.888949] env[68194]: DEBUG oslo_vmware.api [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Waiting for the task: (returnval){ [ 1053.888949] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52991113-9349-9c7f-e041-3b270eef0391" [ 1053.888949] env[68194]: _type = "Task" [ 1053.888949] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.896106] env[68194]: DEBUG oslo_vmware.api [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52991113-9349-9c7f-e041-3b270eef0391, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1053.953103] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1053.953350] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1053.953540] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Deleting the datastore file [datastore1] 4d692986-413f-4c9b-b5cc-de43d2ca498d {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1053.953808] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fbfab470-33d1-455e-8724-a54163ae7d1f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1053.960137] env[68194]: DEBUG oslo_vmware.api [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Waiting for the task: (returnval){ [ 1053.960137] env[68194]: value = "task-3466843" [ 1053.960137] env[68194]: _type = "Task" [ 1053.960137] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1053.969108] env[68194]: DEBUG oslo_vmware.api [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Task: {'id': task-3466843, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1054.398948] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1054.399221] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Creating directory with path [datastore1] vmware_temp/d3370e26-b256-40ce-8e11-d64633ce53dd/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1054.399458] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-43952386-a4e9-4efa-a4a9-a3a76de3fa78 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.410831] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Created directory with path [datastore1] vmware_temp/d3370e26-b256-40ce-8e11-d64633ce53dd/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1054.411014] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Fetch image to [datastore1] vmware_temp/d3370e26-b256-40ce-8e11-d64633ce53dd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1054.411190] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/d3370e26-b256-40ce-8e11-d64633ce53dd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1054.411866] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-677b9428-251e-4f44-a5ba-45da9def2753 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.416146] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1054.418970] env[68194]: 
DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88e77f03-d836-4884-86d6-f339a7ee5fab {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.427827] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-052a1e33-139c-430c-a90a-73dc12994a8d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.457517] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5941f671-84ba-4b22-8029-02d717561ed4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.465342] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-33983f52-b0a4-4fcb-bed3-b6a69ec0d38f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.469335] env[68194]: DEBUG oslo_vmware.api [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Task: {'id': task-3466843, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075556} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1054.469820] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1054.470013] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1054.470197] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1054.470367] env[68194]: INFO nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1054.472365] env[68194]: DEBUG nova.compute.claims [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1054.472533] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1054.472738] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1054.488529] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1054.539357] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3370e26-b256-40ce-8e11-d64633ce53dd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1054.601754] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1054.601945] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3370e26-b256-40ce-8e11-d64633ce53dd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1054.985409] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-963d54e7-e59a-4dce-a755-7d0287ff0dc5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1054.993984] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-adc79c33-e103-4151-8a95-80c366cee7ce {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.026486] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e086e74c-caef-4457-92ba-6078d0572aef {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.033712] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aae9e31c-07b5-45bd-a662-6869f560c860 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.046670] env[68194]: DEBUG nova.compute.provider_tree [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1055.055738] env[68194]: DEBUG nova.scheduler.client.report [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1055.069967] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.597s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1055.070517] env[68194]: ERROR nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1055.070517] env[68194]: Faults: ['InvalidArgument'] [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Traceback (most recent call last): [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 
4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] self.driver.spawn(context, instance, image_meta, [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] self._fetch_image_if_missing(context, vi) [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] image_cache(vi, tmp_image_ds_loc) [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] vm_util.copy_virtual_disk( [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] session._wait_for_task(vmdk_copy_task) [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] return self.wait_for_task(task_ref) [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] return evt.wait() [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] result = hub.switch() [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] return self.greenlet.switch() [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] 
self.f(*self.args, **self.kw) [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] raise exceptions.translate_fault(task_info.error) [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Faults: ['InvalidArgument'] [ 1055.070517] env[68194]: ERROR nova.compute.manager [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] [ 1055.071225] env[68194]: DEBUG nova.compute.utils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1055.072674] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Build of instance 4d692986-413f-4c9b-b5cc-de43d2ca498d was re-scheduled: A specified parameter was not correct: fileType [ 1055.072674] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1055.073058] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1055.073242] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1055.073415] env[68194]: DEBUG nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1055.073583] env[68194]: DEBUG nova.network.neutron [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1055.387008] env[68194]: DEBUG nova.network.neutron [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.397052] env[68194]: INFO nova.compute.manager [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Took 0.32 seconds to deallocate network for instance. [ 1055.417240] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1055.417240] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1055.432015] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1055.432015] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1055.432015] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1055.432015] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1055.432769] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc3e2ddd-8389-4bf7-a423-ee8894f66a52 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.445951] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6d1d44d7-ad32-4534-b72f-0aa532b295d5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.459668] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-742b7b3e-f412-4d94-886e-731267ada6e1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.467065] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1e81c713-fae0-4157-ad46-a15b11e008dc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.501391] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180967MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1055.501557] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1055.501753] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1055.504778] env[68194]: INFO nova.scheduler.client.report [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Deleted allocations for instance 4d692986-413f-4c9b-b5cc-de43d2ca498d [ 1055.545195] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76477dd1-0259-49f8-9147-1ca00c4377f0 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "4d692986-413f-4c9b-b5cc-de43d2ca498d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 431.677s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1055.546757] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "4d692986-413f-4c9b-b5cc-de43d2ca498d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 233.435s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1055.546854] 
env[68194]: DEBUG oslo_concurrency.lockutils [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Acquiring lock "4d692986-413f-4c9b-b5cc-de43d2ca498d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1055.547168] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "4d692986-413f-4c9b-b5cc-de43d2ca498d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1055.547689] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "4d692986-413f-4c9b-b5cc-de43d2ca498d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1055.549070] env[68194]: INFO nova.compute.manager [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Terminating instance [ 1055.550745] env[68194]: DEBUG nova.compute.manager [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1055.550918] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1055.551735] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a595f52a-08ce-43cd-9c16-ab668cc8a177 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.557863] env[68194]: DEBUG nova.compute.manager [None req-2c677f55-1024-4ed0-a2c4-7da6f53c3696 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 1f155fbf-6460-4426-b6cf-176d44415eee] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1055.567182] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41a5b9e6-3042-432f-8470-b1f9c3e75c71 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1055.579870] env[68194]: DEBUG nova.compute.manager [None req-2c677f55-1024-4ed0-a2c4-7da6f53c3696 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 1f155fbf-6460-4426-b6cf-176d44415eee] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1055.581476] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.581620] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e575e1c7-7f35-41de-96e7-0771a4137bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.581747] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fe79ae03-c408-4d18-914e-e64065998663 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.581870] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.582018] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.582605] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.582680] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.582894] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.583113] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1055.596518] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4d692986-413f-4c9b-b5cc-de43d2ca498d could not be found. [ 1055.596710] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1055.596887] env[68194]: INFO nova.compute.manager [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1055.597166] env[68194]: DEBUG oslo.service.loopingcall [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1055.599483] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8216e910-66b8-4147-a264-93e7eeefc7da has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.600893] env[68194]: DEBUG nova.compute.manager [-] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1055.601026] env[68194]: DEBUG nova.network.neutron [-] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1055.610932] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.612751] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2c677f55-1024-4ed0-a2c4-7da6f53c3696 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "1f155fbf-6460-4426-b6cf-176d44415eee" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 239.100s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1055.621817] env[68194]: DEBUG nova.compute.manager [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1055.625746] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e9321912-2500-406b-b504-7668258a0c00 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.635833] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8b885df7-e241-452a-bcaa-861b491a6ee0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.646609] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 337db74c-cea1-4760-a06e-c33cfb4d1de9 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.661222] env[68194]: DEBUG nova.network.neutron [-] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1055.668334] env[68194]: INFO nova.compute.manager [-] [instance: 4d692986-413f-4c9b-b5cc-de43d2ca498d] Took 0.07 seconds to deallocate network for instance. [ 1055.688746] env[68194]: DEBUG oslo_concurrency.lockutils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1055.690192] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fd693e84-5f26-4382-af13-d703dbbee894 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.701147] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e683863a-2b50-4681-a192-6955dc36562b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.713402] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f81311b2-917b-425b-8ad9-627f08548402 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.725094] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fb4dd17b-dc02-4086-b450-9449212ed7b3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.734626] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 9d6026ba-70bf-4824-a23d-434d63e5bb85 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.749551] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4c2b5eb3-9dcc-4499-9242-209289723719 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.761469] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 24c3932c-dced-4218-8a64-a3183ffc82f1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.778556] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e0c4a464-c6a2-4ae6-a210-55e4d942f641 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378 tempest-FloatingIPsAssociationNegativeTestJSON-2037562378-project-member] Lock "4d692986-413f-4c9b-b5cc-de43d2ca498d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.229s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1055.783156] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.793846] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d42d0fa9-08f2-40d9-958f-775e55fb0ea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.804184] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.813713] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.824164] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f141f989-f0c8-4943-982d-adf499342ec3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.834849] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 811aeb6b-eb94-4618-a0da-f391184cbd70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.844576] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 554a4ee3-092b-443a-99fc-63d9d753c8ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.854265] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b5596341-1994-4187-a240-7e02d1534ea0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.863483] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 60be602e-d9e3-4f0f-972e-e19acbb3813e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.873866] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ef28d606-6b14-4fd1-9768-4dfc90b06fd5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.884962] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ec03aeaf-11e6-456f-b408-77557f77645b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.895040] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1ef2d43f-9e6f-4354-91c5-9e1155c2a382 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.905227] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 6ee32532-e88c-4eb6-9e3f-c1ea42a4d560 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1055.905492] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1055.905704] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1056.327839] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90600482-d9c6-419b-9a33-63c4304c79d3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.335468] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ef9420eb-ae82-4102-acac-b3f746a7a69c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.367028] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0691701b-3c46-43c0-8efe-eebf065c478f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.375277] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea6c5426-06b9-4421-bdaf-13d8ab6b1fec {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1056.388822] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1056.400890] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1056.418089] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1056.418692] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.917s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1056.418961] env[68194]: DEBUG oslo_concurrency.lockutils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.730s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1056.420528] env[68194]: INFO nova.compute.claims [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1056.451959] env[68194]: DEBUG oslo_concurrency.lockutils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.033s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1056.452796] env[68194]: DEBUG nova.compute.utils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Instance 8216e910-66b8-4147-a264-93e7eeefc7da could not be found. {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1056.454408] env[68194]: DEBUG nova.compute.manager [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Instance disappeared during build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2487}} [ 1056.454592] env[68194]: DEBUG nova.compute.manager [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1056.454795] env[68194]: DEBUG oslo_concurrency.lockutils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquiring lock "refresh_cache-8216e910-66b8-4147-a264-93e7eeefc7da" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1056.454965] env[68194]: DEBUG oslo_concurrency.lockutils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Acquired lock "refresh_cache-8216e910-66b8-4147-a264-93e7eeefc7da" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1056.455141] env[68194]: DEBUG nova.network.neutron [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1056.462825] env[68194]: DEBUG nova.compute.utils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Can not refresh info_cache because instance was not found {{(pid=68194) refresh_info_cache_for_instance /opt/stack/nova/nova/compute/utils.py:1010}} [ 1056.485458] env[68194]: DEBUG nova.network.neutron [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1056.567204] env[68194]: DEBUG nova.network.neutron [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.578159] env[68194]: DEBUG oslo_concurrency.lockutils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Releasing lock "refresh_cache-8216e910-66b8-4147-a264-93e7eeefc7da" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1056.578432] env[68194]: DEBUG nova.compute.manager [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1056.578640] env[68194]: DEBUG nova.compute.manager [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1056.579119] env[68194]: DEBUG nova.network.neutron [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1056.597365] env[68194]: DEBUG nova.network.neutron [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1056.604477] env[68194]: DEBUG nova.network.neutron [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1056.614953] env[68194]: INFO nova.compute.manager [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] [instance: 8216e910-66b8-4147-a264-93e7eeefc7da] Took 0.04 seconds to deallocate network for instance. [ 1056.672231] env[68194]: DEBUG oslo_concurrency.lockutils [None req-22e5e969-4339-4816-b0e7-0c693264b3fe tempest-DeleteServersAdminTestJSON-710170354 tempest-DeleteServersAdminTestJSON-710170354-project-member] Lock "8216e910-66b8-4147-a264-93e7eeefc7da" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.668s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1056.686156] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1056.746824] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1056.747198] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1056.748766] env[68194]: INFO nova.compute.claims [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1057.285502] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ad19e49-cc49-4b6d-9df7-527f07320ce4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.294205] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11e7c979-cd71-4438-962d-bef0fd6b3372 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.327856] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f048cf29-37fa-452f-bd44-2f5bd8b2ccc1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.336907] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-042d7096-bb80-43ee-a5a1-b344017d2897 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.351776] env[68194]: DEBUG nova.compute.provider_tree [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1057.361777] env[68194]: DEBUG nova.scheduler.client.report [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1057.381290] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.634s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1057.381846] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1057.423077] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.424248] env[68194]: DEBUG nova.compute.utils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1057.425729] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1057.426075] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1057.427034] env[68194]: DEBUG nova.network.neutron [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1057.436783] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Start building block device mappings for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1057.504555] env[68194]: DEBUG nova.policy [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c19267337f7d4812892fa16ddb578639', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e5420ac8a2ab44019b10f06656aba4ca', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1057.529510] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1057.562328] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1057.562328] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1057.562328] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1057.562737] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1057.563161] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1057.563442] env[68194]: DEBUG 
nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1057.563850] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1057.564147] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1057.564449] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1057.564847] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1057.567093] env[68194]: DEBUG nova.virt.hardware [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1057.567093] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfac3046-0c96-4a87-8e2d-169e90b9f939 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.577379] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e9e54c73-5237-4e6b-962b-d6335fe0551c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1057.866839] env[68194]: DEBUG nova.network.neutron [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Successfully created port: 5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1058.422069] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1058.422340] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache 
/opt/stack/nova/nova/compute/manager.py:9911}} [ 1058.422391] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1058.446380] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.446535] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.446672] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: fe79ae03-c408-4d18-914e-e64065998663] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.446800] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.446928] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.447534] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.447722] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.447854] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.447988] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.448196] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1058.448336] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1058.613427] env[68194]: DEBUG nova.compute.manager [req-258abc82-d5f9-4080-adc6-b2081fe18a29 req-7995cd06-a329-4775-89a2-1e9b6c5c8d54 service nova] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Received event network-vif-plugged-5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1058.613723] env[68194]: DEBUG oslo_concurrency.lockutils [req-258abc82-d5f9-4080-adc6-b2081fe18a29 req-7995cd06-a329-4775-89a2-1e9b6c5c8d54 service nova] Acquiring lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1058.613857] env[68194]: DEBUG oslo_concurrency.lockutils [req-258abc82-d5f9-4080-adc6-b2081fe18a29 req-7995cd06-a329-4775-89a2-1e9b6c5c8d54 service nova] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1058.614038] env[68194]: DEBUG oslo_concurrency.lockutils [req-258abc82-d5f9-4080-adc6-b2081fe18a29 req-7995cd06-a329-4775-89a2-1e9b6c5c8d54 service nova] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1058.614211] env[68194]: DEBUG nova.compute.manager [req-258abc82-d5f9-4080-adc6-b2081fe18a29 req-7995cd06-a329-4775-89a2-1e9b6c5c8d54 service nova] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] No waiting events found dispatching network-vif-plugged-5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1058.614373] env[68194]: WARNING nova.compute.manager [req-258abc82-d5f9-4080-adc6-b2081fe18a29 req-7995cd06-a329-4775-89a2-1e9b6c5c8d54 service nova] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Received unexpected event network-vif-plugged-5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71 for instance with vm_state building and task_state spawning. 
[ 1058.635840] env[68194]: DEBUG nova.network.neutron [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Successfully updated port: 5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1058.648671] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquiring lock "refresh_cache-ce0f1886-189f-4ab3-9ed6-376dce542f5f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1058.648823] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquired lock "refresh_cache-ce0f1886-189f-4ab3-9ed6-376dce542f5f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1058.649072] env[68194]: DEBUG nova.network.neutron [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1058.692214] env[68194]: DEBUG nova.network.neutron [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1058.873103] env[68194]: DEBUG nova.network.neutron [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Updating instance_info_cache with network_info: [{"id": "5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71", "address": "fa:16:3e:db:28:0e", "network": {"id": "a5b6e8ca-a51d-4cab-9cd0-d0c6acfcbb46", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-533279881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5420ac8a2ab44019b10f06656aba4ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fe63b7a-3f", "ovs_interfaceid": "5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1058.890069] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Releasing lock "refresh_cache-ce0f1886-189f-4ab3-9ed6-376dce542f5f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1058.890414] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Instance network_info: |[{"id": "5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71", "address": "fa:16:3e:db:28:0e", "network": {"id": "a5b6e8ca-a51d-4cab-9cd0-d0c6acfcbb46", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-533279881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5420ac8a2ab44019b10f06656aba4ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fe63b7a-3f", "ovs_interfaceid": "5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1058.890830] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:28:0e', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'aa8c2f93-f287-41b3-adb6-4942a7ea2a0b', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1058.902062] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Creating folder: Project (e5420ac8a2ab44019b10f06656aba4ca). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1058.902918] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12b1da00-66b0-4399-9940-323da80b6181 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.916655] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Created folder: Project (e5420ac8a2ab44019b10f06656aba4ca) in parent group-v692426. [ 1058.916861] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Creating folder: Instances. Parent ref: group-v692491. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1058.917152] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a6747ba2-5cac-46dc-ba43-84921c73e9e1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.928613] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Created folder: Instances in parent group-v692491. [ 1058.928855] env[68194]: DEBUG oslo.service.loopingcall [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1058.929054] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1058.929285] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f6191c2d-96dd-4344-9a50-b706f307898c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1058.951533] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1058.951533] env[68194]: value = "task-3466846" [ 1058.951533] env[68194]: _type = "Task" [ 1058.951533] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1058.959531] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466846, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.437489] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1059.465524] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466846, 'name': CreateVM_Task, 'duration_secs': 0.386121} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1059.465747] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1059.466529] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1059.466701] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1059.467068] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1059.467341] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5224e5ad-daeb-43ba-81a9-625f209f6c47 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1059.472553] env[68194]: DEBUG oslo_vmware.api [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 
tempest-SecurityGroupsTestJSON-1230207051-project-member] Waiting for the task: (returnval){ [ 1059.472553] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52e61f7f-a553-cc83-6492-339b8bde4998" [ 1059.472553] env[68194]: _type = "Task" [ 1059.472553] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1059.480575] env[68194]: DEBUG oslo_vmware.api [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52e61f7f-a553-cc83-6492-339b8bde4998, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1059.986355] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1059.986626] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1059.986850] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1060.681456] env[68194]: DEBUG nova.compute.manager [req-9c71eb67-c503-4528-bdcc-2b3e273817e0 req-72eaf48e-9eb6-475d-890d-a97d32bd591b service nova] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Received event network-changed-5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1060.681770] env[68194]: DEBUG nova.compute.manager [req-9c71eb67-c503-4528-bdcc-2b3e273817e0 req-72eaf48e-9eb6-475d-890d-a97d32bd591b service nova] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Refreshing instance network info cache due to event network-changed-5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1060.681926] env[68194]: DEBUG oslo_concurrency.lockutils [req-9c71eb67-c503-4528-bdcc-2b3e273817e0 req-72eaf48e-9eb6-475d-890d-a97d32bd591b service nova] Acquiring lock "refresh_cache-ce0f1886-189f-4ab3-9ed6-376dce542f5f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1060.682077] env[68194]: DEBUG oslo_concurrency.lockutils [req-9c71eb67-c503-4528-bdcc-2b3e273817e0 req-72eaf48e-9eb6-475d-890d-a97d32bd591b service nova] Acquired lock "refresh_cache-ce0f1886-189f-4ab3-9ed6-376dce542f5f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1060.682242] env[68194]: DEBUG nova.network.neutron [req-9c71eb67-c503-4528-bdcc-2b3e273817e0 req-72eaf48e-9eb6-475d-890d-a97d32bd591b service nova] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Refreshing network info cache for port 5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1061.323800] env[68194]: DEBUG nova.network.neutron [req-9c71eb67-c503-4528-bdcc-2b3e273817e0 req-72eaf48e-9eb6-475d-890d-a97d32bd591b service nova] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Updated VIF entry in instance network info cache for port 5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1061.323800] env[68194]: DEBUG nova.network.neutron [req-9c71eb67-c503-4528-bdcc-2b3e273817e0 req-72eaf48e-9eb6-475d-890d-a97d32bd591b service nova] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Updating instance_info_cache with network_info: [{"id": "5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71", "address": "fa:16:3e:db:28:0e", "network": {"id": "a5b6e8ca-a51d-4cab-9cd0-d0c6acfcbb46", "bridge": "br-int", "label": "tempest-SecurityGroupsTestJSON-533279881-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e5420ac8a2ab44019b10f06656aba4ca", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "aa8c2f93-f287-41b3-adb6-4942a7ea2a0b", "external-id": "nsx-vlan-transportzone-363", "segmentation_id": 363, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap5fe63b7a-3f", "ovs_interfaceid": "5fe63b7a-3fbc-45b7-a1b0-0dc23487ae71", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1061.336054] env[68194]: DEBUG oslo_concurrency.lockutils [req-9c71eb67-c503-4528-bdcc-2b3e273817e0 req-72eaf48e-9eb6-475d-890d-a97d32bd591b service nova] Releasing lock "refresh_cache-ce0f1886-189f-4ab3-9ed6-376dce542f5f" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1063.446577] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] 
Acquiring lock "b487291e-1b85-4064-9949-3d8895b6dcae" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1063.446886] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "b487291e-1b85-4064-9949-3d8895b6dcae" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1064.165984] env[68194]: DEBUG oslo_concurrency.lockutils [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquiring lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1076.168273] env[68194]: DEBUG oslo_concurrency.lockutils [None req-81e9ff32-fa68-4d50-a7a5-fff089a2a377 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] Acquiring lock "939645a5-ef9a-4951-ada2-6bd95cec173f" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1076.168604] env[68194]: DEBUG oslo_concurrency.lockutils [None req-81e9ff32-fa68-4d50-a7a5-fff089a2a377 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] Lock "939645a5-ef9a-4951-ada2-6bd95cec173f" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1087.685071] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2ca70a7a-7eb7-4054-a72e-286e2f119956 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] Acquiring lock "9a4d19c2-79b2-4323-a68a-6ba2c82e4d13" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1087.685071] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2ca70a7a-7eb7-4054-a72e-286e2f119956 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] Lock "9a4d19c2-79b2-4323-a68a-6ba2c82e4d13" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1088.510902] env[68194]: DEBUG oslo_concurrency.lockutils [None req-225f0657-6967-49b5-ad9a-85a982f49140 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] Acquiring lock "de2cd600-0b9a-45f2-a0e7-b78dfd52d0f7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1088.511145] env[68194]: DEBUG oslo_concurrency.lockutils [None req-225f0657-6967-49b5-ad9a-85a982f49140 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] Lock "de2cd600-0b9a-45f2-a0e7-b78dfd52d0f7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1089.097993] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5a739cbb-877a-4bf5-876f-760b16467ebd tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] Acquiring lock "4982c984-c85f-4c23-b643-9ad8a7a4f405" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1089.098533] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5a739cbb-877a-4bf5-876f-760b16467ebd tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] Lock "4982c984-c85f-4c23-b643-9ad8a7a4f405" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1089.879768] env[68194]: DEBUG oslo_concurrency.lockutils [None req-da20857b-6d94-4d17-9402-da6a623e2957 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] Acquiring lock "3f7dc638-fa6b-40b0-90dc-b4ddf0b2b99b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1089.880628] env[68194]: DEBUG oslo_concurrency.lockutils [None req-da20857b-6d94-4d17-9402-da6a623e2957 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] Lock "3f7dc638-fa6b-40b0-90dc-b4ddf0b2b99b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1096.069594] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b68fc678-ad22-4cf7-a1cf-02ce687f8436 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Acquiring lock "4635c03b-1415-4ad9-8825-8997c68ad9e7" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1096.069868] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b68fc678-ad22-4cf7-a1cf-02ce687f8436 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Lock "4635c03b-1415-4ad9-8825-8997c68ad9e7" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1103.354807] env[68194]: WARNING oslo_vmware.rw_handles [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 
tempest-MigrationsAdminTest-1330538300-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1103.354807] env[68194]: ERROR oslo_vmware.rw_handles [ 1103.355357] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/d3370e26-b256-40ce-8e11-d64633ce53dd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1103.357178] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1103.357426] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Copying Virtual Disk [datastore1] vmware_temp/d3370e26-b256-40ce-8e11-d64633ce53dd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/d3370e26-b256-40ce-8e11-d64633ce53dd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1103.357771] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c3537ba6-b722-401b-90fb-2aa11faf9848 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.366048] env[68194]: DEBUG oslo_vmware.api [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Waiting for the task: (returnval){ [ 1103.366048] env[68194]: value = "task-3466847" [ 1103.366048] env[68194]: _type = "Task" [ 1103.366048] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.374330] env[68194]: DEBUG oslo_vmware.api [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Task: {'id': task-3466847, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.877050] env[68194]: DEBUG oslo_vmware.exceptions [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1103.877050] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1103.877204] env[68194]: ERROR nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1103.877204] env[68194]: Faults: ['InvalidArgument'] [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Traceback (most recent call last): [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] yield resources [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] self.driver.spawn(context, instance, image_meta, [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] self._fetch_image_if_missing(context, vi) [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] image_cache(vi, tmp_image_ds_loc) [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] vm_util.copy_virtual_disk( [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] session._wait_for_task(vmdk_copy_task) [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] return self.wait_for_task(task_ref) [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] return evt.wait() [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] result = hub.switch() [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] return self.greenlet.switch() [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] self.f(*self.args, **self.kw) [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] raise exceptions.translate_fault(task_info.error) [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Faults: ['InvalidArgument'] [ 1103.877204] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] [ 1103.878166] env[68194]: INFO nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Terminating instance [ 1103.879066] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1103.879300] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1103.879911] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1103.880120] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1103.880339] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ac8a75d8-748b-451c-886a-b0b6f197a97d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.882595] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2bf6b518-87ec-42fa-884e-bd376bc83467 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.890012] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1103.890156] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-b5d212fb-c802-4be9-ae39-330262e17bca {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.892264] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1103.892434] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1103.893502] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a063f95f-23ba-4ac4-9596-3162dd1c181c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.898270] env[68194]: DEBUG oslo_vmware.api [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Waiting for the task: (returnval){ [ 1103.898270] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527d419e-88a6-7075-86f7-259d2441e4e5" [ 1103.898270] env[68194]: _type = "Task" [ 1103.898270] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.905409] env[68194]: DEBUG oslo_vmware.api [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527d419e-88a6-7075-86f7-259d2441e4e5, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1103.970484] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1103.970741] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1103.970929] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Deleting the datastore file [datastore1] b8aaf064-e8a6-444a-83cd-6a7e02b82f33 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1103.971231] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-568da05f-a90f-48c0-a698-86b8f59432e3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1103.977623] env[68194]: DEBUG oslo_vmware.api [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Waiting for the task: (returnval){ [ 1103.977623] env[68194]: value = "task-3466849" [ 1103.977623] env[68194]: _type = "Task" [ 1103.977623] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1103.985671] env[68194]: DEBUG oslo_vmware.api [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Task: {'id': task-3466849, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1104.408785] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1104.409117] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Creating directory with path [datastore1] vmware_temp/cf2f5aa2-bd2b-4777-8074-cd83db0ac94e/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1104.409322] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf67557b-1976-48df-82a7-5b02aeb2a642 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.421770] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Created directory with path [datastore1] vmware_temp/cf2f5aa2-bd2b-4777-8074-cd83db0ac94e/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1104.421974] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Fetch image to [datastore1] vmware_temp/cf2f5aa2-bd2b-4777-8074-cd83db0ac94e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1104.422187] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/cf2f5aa2-bd2b-4777-8074-cd83db0ac94e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1104.422944] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-738b2cc7-ba20-4cab-b543-33c28fa37146 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.429837] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06db986c-1a09-44ed-8240-2a6e466c4b40 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.438753] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50b7d979-1052-4c12-853a-7d52d0ab4599 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.470085] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-ecbe58b9-56d4-4912-a419-816aff028996 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.475600] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a881d7ec-bbe1-4a18-a021-444e992fc781 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.485723] env[68194]: DEBUG oslo_vmware.api [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Task: {'id': task-3466849, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.083313} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1104.485946] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1104.486146] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1104.486320] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1104.486505] env[68194]: INFO nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1104.488672] env[68194]: DEBUG nova.compute.claims [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1104.488843] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1104.489075] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1104.504348] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1104.556124] env[68194]: DEBUG oslo_vmware.rw_handles [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf2f5aa2-bd2b-4777-8074-cd83db0ac94e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1104.614741] env[68194]: DEBUG oslo_vmware.rw_handles [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1104.614910] env[68194]: DEBUG oslo_vmware.rw_handles [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/cf2f5aa2-bd2b-4777-8074-cd83db0ac94e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1104.920491] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f206119-96cd-4735-a6db-8aaf4b2e4b4a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.927920] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8699cd2f-abdc-41cf-90af-ec3530177d83 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.956644] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa1ae748-3ffd-4d53-883a-a47e44ba925e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.963713] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4f08bb0f-c6b0-4662-ad4a-e0f94e84f42d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1104.977466] env[68194]: DEBUG nova.compute.provider_tree [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1104.985683] env[68194]: DEBUG nova.scheduler.client.report [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1104.998769] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.510s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1104.999305] env[68194]: ERROR nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1104.999305] env[68194]: Faults: ['InvalidArgument'] [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Traceback (most recent call last): [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1104.999305] env[68194]: ERROR nova.compute.manager 
[instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] self.driver.spawn(context, instance, image_meta, [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] self._fetch_image_if_missing(context, vi) [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] image_cache(vi, tmp_image_ds_loc) [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] vm_util.copy_virtual_disk( [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] session._wait_for_task(vmdk_copy_task) [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] return self.wait_for_task(task_ref) [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] return evt.wait() [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] result = hub.switch() [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] return self.greenlet.switch() [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] self.f(*self.args, **self.kw) [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] raise exceptions.translate_fault(task_info.error) [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Faults: ['InvalidArgument'] [ 1104.999305] env[68194]: ERROR nova.compute.manager [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] [ 1105.000948] env[68194]: DEBUG nova.compute.utils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1105.001342] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Build of instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 was re-scheduled: A specified parameter was not correct: fileType [ 1105.001342] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1105.001710] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1105.001885] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1105.002076] env[68194]: DEBUG nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1105.002247] env[68194]: DEBUG nova.network.neutron [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1105.301148] env[68194]: DEBUG nova.network.neutron [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.314624] env[68194]: INFO nova.compute.manager [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Took 0.31 seconds to deallocate network for instance. [ 1105.398564] env[68194]: INFO nova.scheduler.client.report [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Deleted allocations for instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 [ 1105.422022] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e5491054-ef7f-476c-ac43-03a4883c770c tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 476.339s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.423756] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 277.511s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1105.424060] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Acquiring lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1105.424323] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1105.424539] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.426571] env[68194]: INFO nova.compute.manager [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Terminating instance [ 1105.428678] env[68194]: DEBUG nova.compute.manager [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1105.428821] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1105.429026] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-503ef638-1a55-40fc-a138-b2538caa9543 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.439332] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d03ea6f7-5f96-484d-a555-4e944f7f370d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1105.450558] env[68194]: DEBUG nova.compute.manager [None req-c89a6692-ac2b-41e8-b3da-4b7f140ee944 tempest-InstanceActionsTestJSON-822727360 tempest-InstanceActionsTestJSON-822727360-project-member] [instance: e9321912-2500-406b-b504-7668258a0c00] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.471031] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b8aaf064-e8a6-444a-83cd-6a7e02b82f33 could not be found. [ 1105.471031] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1105.471173] env[68194]: INFO nova.compute.manager [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1105.471418] env[68194]: DEBUG oslo.service.loopingcall [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1105.471668] env[68194]: DEBUG nova.compute.manager [-] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1105.471781] env[68194]: DEBUG nova.network.neutron [-] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1105.475260] env[68194]: DEBUG nova.compute.manager [None req-c89a6692-ac2b-41e8-b3da-4b7f140ee944 tempest-InstanceActionsTestJSON-822727360 tempest-InstanceActionsTestJSON-822727360-project-member] [instance: e9321912-2500-406b-b504-7668258a0c00] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1105.494535] env[68194]: DEBUG nova.network.neutron [-] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1105.497377] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c89a6692-ac2b-41e8-b3da-4b7f140ee944 tempest-InstanceActionsTestJSON-822727360 tempest-InstanceActionsTestJSON-822727360-project-member] Lock "e9321912-2500-406b-b504-7668258a0c00" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.902s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.503215] env[68194]: INFO nova.compute.manager [-] [instance: b8aaf064-e8a6-444a-83cd-6a7e02b82f33] Took 0.03 seconds to deallocate network for instance. [ 1105.508544] env[68194]: DEBUG nova.compute.manager [None req-2487242f-95ba-4be7-b661-03acd89e1c3a tempest-ServersTestManualDisk-529272396 tempest-ServersTestManualDisk-529272396-project-member] [instance: 8b885df7-e241-452a-bcaa-861b491a6ee0] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.536136] env[68194]: DEBUG nova.compute.manager [None req-2487242f-95ba-4be7-b661-03acd89e1c3a tempest-ServersTestManualDisk-529272396 tempest-ServersTestManualDisk-529272396-project-member] [instance: 8b885df7-e241-452a-bcaa-861b491a6ee0] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1105.559365] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2487242f-95ba-4be7-b661-03acd89e1c3a tempest-ServersTestManualDisk-529272396 tempest-ServersTestManualDisk-529272396-project-member] Lock "8b885df7-e241-452a-bcaa-861b491a6ee0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 237.927s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.567853] env[68194]: DEBUG nova.compute.manager [None req-c4efa6ad-e1ec-4341-bf44-7023d1b4c497 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] [instance: 337db74c-cea1-4760-a06e-c33cfb4d1de9] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.604945] env[68194]: DEBUG nova.compute.manager [None req-c4efa6ad-e1ec-4341-bf44-7023d1b4c497 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] [instance: 337db74c-cea1-4760-a06e-c33cfb4d1de9] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1105.622255] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0cb10cbf-0610-4aa2-b16d-4ff7fd921ea3 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "b8aaf064-e8a6-444a-83cd-6a7e02b82f33" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.198s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.626948] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c4efa6ad-e1ec-4341-bf44-7023d1b4c497 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] Lock "337db74c-cea1-4760-a06e-c33cfb4d1de9" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 229.557s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.637213] env[68194]: DEBUG nova.compute.manager [None req-1792064f-ebc9-40bd-8412-94c4ca6e15f2 tempest-ServerMetadataTestJSON-21535063 tempest-ServerMetadataTestJSON-21535063-project-member] [instance: fd693e84-5f26-4382-af13-d703dbbee894] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.661557] env[68194]: DEBUG nova.compute.manager [None req-1792064f-ebc9-40bd-8412-94c4ca6e15f2 tempest-ServerMetadataTestJSON-21535063 tempest-ServerMetadataTestJSON-21535063-project-member] [instance: fd693e84-5f26-4382-af13-d703dbbee894] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1105.684516] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1792064f-ebc9-40bd-8412-94c4ca6e15f2 tempest-ServerMetadataTestJSON-21535063 tempest-ServerMetadataTestJSON-21535063-project-member] Lock "fd693e84-5f26-4382-af13-d703dbbee894" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.425s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.694154] env[68194]: DEBUG nova.compute.manager [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] [instance: e683863a-2b50-4681-a192-6955dc36562b] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.718184] env[68194]: DEBUG nova.compute.manager [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] [instance: e683863a-2b50-4681-a192-6955dc36562b] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1105.738635] env[68194]: DEBUG oslo_concurrency.lockutils [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Lock "e683863a-2b50-4681-a192-6955dc36562b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.425s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.749084] env[68194]: DEBUG nova.compute.manager [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] [instance: f81311b2-917b-425b-8ad9-627f08548402] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.772945] env[68194]: DEBUG nova.compute.manager [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] [instance: f81311b2-917b-425b-8ad9-627f08548402] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1105.796051] env[68194]: DEBUG oslo_concurrency.lockutils [None req-eab53a20-b0e5-4afd-85f2-2f4cc00f4f58 tempest-MultipleCreateTestJSON-367536536 tempest-MultipleCreateTestJSON-367536536-project-member] Lock "f81311b2-917b-425b-8ad9-627f08548402" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 226.450s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.806039] env[68194]: DEBUG nova.compute.manager [None req-351e913d-77f6-41ff-ac1d-c435043d9c91 tempest-ServerTagsTestJSON-1095445404 tempest-ServerTagsTestJSON-1095445404-project-member] [instance: fb4dd17b-dc02-4086-b450-9449212ed7b3] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.831024] env[68194]: DEBUG nova.compute.manager [None req-351e913d-77f6-41ff-ac1d-c435043d9c91 tempest-ServerTagsTestJSON-1095445404 tempest-ServerTagsTestJSON-1095445404-project-member] [instance: fb4dd17b-dc02-4086-b450-9449212ed7b3] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1105.852295] env[68194]: DEBUG oslo_concurrency.lockutils [None req-351e913d-77f6-41ff-ac1d-c435043d9c91 tempest-ServerTagsTestJSON-1095445404 tempest-ServerTagsTestJSON-1095445404-project-member] Lock "fb4dd17b-dc02-4086-b450-9449212ed7b3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 222.470s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.861311] env[68194]: DEBUG nova.compute.manager [None req-76374ad8-8d6b-4fbe-9bd1-9aacf2c0cfa6 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] [instance: 9d6026ba-70bf-4824-a23d-434d63e5bb85] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.884845] env[68194]: DEBUG nova.compute.manager [None req-76374ad8-8d6b-4fbe-9bd1-9aacf2c0cfa6 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] [instance: 9d6026ba-70bf-4824-a23d-434d63e5bb85] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1105.906393] env[68194]: DEBUG oslo_concurrency.lockutils [None req-76374ad8-8d6b-4fbe-9bd1-9aacf2c0cfa6 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] Lock "9d6026ba-70bf-4824-a23d-434d63e5bb85" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 219.842s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.917753] env[68194]: DEBUG nova.compute.manager [None req-0bbd6e60-a10a-40c6-82f2-06b41e6470a1 tempest-ServerPasswordTestJSON-1857584348 tempest-ServerPasswordTestJSON-1857584348-project-member] [instance: 4c2b5eb3-9dcc-4499-9242-209289723719] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.942571] env[68194]: DEBUG nova.compute.manager [None req-0bbd6e60-a10a-40c6-82f2-06b41e6470a1 tempest-ServerPasswordTestJSON-1857584348 tempest-ServerPasswordTestJSON-1857584348-project-member] [instance: 4c2b5eb3-9dcc-4499-9242-209289723719] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1105.965729] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0bbd6e60-a10a-40c6-82f2-06b41e6470a1 tempest-ServerPasswordTestJSON-1857584348 tempest-ServerPasswordTestJSON-1857584348-project-member] Lock "4c2b5eb3-9dcc-4499-9242-209289723719" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 216.429s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1105.974973] env[68194]: DEBUG nova.compute.manager [None req-ef6bb795-8214-4a7b-bd6d-fcde051a5a16 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] [instance: 24c3932c-dced-4218-8a64-a3183ffc82f1] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1105.998804] env[68194]: DEBUG nova.compute.manager [None req-ef6bb795-8214-4a7b-bd6d-fcde051a5a16 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] [instance: 24c3932c-dced-4218-8a64-a3183ffc82f1] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1106.018211] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef6bb795-8214-4a7b-bd6d-fcde051a5a16 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Lock "24c3932c-dced-4218-8a64-a3183ffc82f1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.997s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1106.027030] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1106.082308] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1106.082691] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1106.084503] env[68194]: INFO nova.compute.claims [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1106.464688] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2e63c25-a348-4355-9f1f-98f291e73699 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.472652] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-050f564c-e8a2-4b1a-b312-b2302126b3e5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.501447] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f95f9bdc-1c41-4d4e-9c39-3a050e17f24a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.508453] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77dbcda6-fce0-4357-8809-90b13164f7ad {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.521160] env[68194]: DEBUG nova.compute.provider_tree [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1106.529668] env[68194]: DEBUG nova.scheduler.client.report [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1106.544868] env[68194]: DEBUG 
oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.462s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1106.545339] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1106.578706] env[68194]: DEBUG nova.compute.utils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1106.580460] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1106.580460] env[68194]: DEBUG nova.network.neutron [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1106.590163] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1106.645617] env[68194]: DEBUG nova.policy [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31d74bfb8537483e8adf50b60f8b635a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'defbdc952aac495caf13c7cc9ead3a53', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1106.654056] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1106.679175] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1106.679423] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1106.679610] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1106.679792] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1106.679967] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1106.680901] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1106.680901] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1106.680901] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1106.680901] 
env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1106.680901] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1106.681135] env[68194]: DEBUG nova.virt.hardware [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1106.681796] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-89655aa0-80b0-40ce-b450-36e4c45af526 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.689460] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8051e5d-2045-4f0e-9952-db8f549838f2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1106.962869] env[68194]: DEBUG nova.network.neutron [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Successfully created port: 214d053d-815a-49bf-960e-b4ccba2c47e8 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1107.479468] env[68194]: DEBUG nova.compute.manager [req-8687dc67-b709-4638-a47f-23abf498d1e7 req-8b18d55c-e5a0-419e-915a-da395059672e service nova] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Received event network-vif-plugged-214d053d-815a-49bf-960e-b4ccba2c47e8 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1107.479740] env[68194]: DEBUG oslo_concurrency.lockutils [req-8687dc67-b709-4638-a47f-23abf498d1e7 req-8b18d55c-e5a0-419e-915a-da395059672e service nova] Acquiring lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1107.479964] env[68194]: DEBUG oslo_concurrency.lockutils [req-8687dc67-b709-4638-a47f-23abf498d1e7 req-8b18d55c-e5a0-419e-915a-da395059672e service nova] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1107.480156] env[68194]: DEBUG oslo_concurrency.lockutils [req-8687dc67-b709-4638-a47f-23abf498d1e7 req-8b18d55c-e5a0-419e-915a-da395059672e service nova] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 
1107.480244] env[68194]: DEBUG nova.compute.manager [req-8687dc67-b709-4638-a47f-23abf498d1e7 req-8b18d55c-e5a0-419e-915a-da395059672e service nova] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] No waiting events found dispatching network-vif-plugged-214d053d-815a-49bf-960e-b4ccba2c47e8 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1107.480379] env[68194]: WARNING nova.compute.manager [req-8687dc67-b709-4638-a47f-23abf498d1e7 req-8b18d55c-e5a0-419e-915a-da395059672e service nova] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Received unexpected event network-vif-plugged-214d053d-815a-49bf-960e-b4ccba2c47e8 for instance with vm_state building and task_state spawning. [ 1107.561092] env[68194]: DEBUG nova.network.neutron [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Successfully updated port: 214d053d-815a-49bf-960e-b4ccba2c47e8 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1107.573917] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "refresh_cache-47cc57d7-40db-4a19-a983-f4e9ea9e8984" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1107.574078] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquired lock "refresh_cache-47cc57d7-40db-4a19-a983-f4e9ea9e8984" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1107.574234] env[68194]: DEBUG nova.network.neutron [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1107.617284] env[68194]: DEBUG nova.network.neutron [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1107.770785] env[68194]: DEBUG nova.network.neutron [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Updating instance_info_cache with network_info: [{"id": "214d053d-815a-49bf-960e-b4ccba2c47e8", "address": "fa:16:3e:db:e8:c2", "network": {"id": "58e5fa41-0570-4409-8c1e-7c52fd2c7a7a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1202426316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "defbdc952aac495caf13c7cc9ead3a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214d053d-81", "ovs_interfaceid": "214d053d-815a-49bf-960e-b4ccba2c47e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1107.783044] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Releasing lock "refresh_cache-47cc57d7-40db-4a19-a983-f4e9ea9e8984" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1107.783351] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Instance network_info: |[{"id": "214d053d-815a-49bf-960e-b4ccba2c47e8", "address": "fa:16:3e:db:e8:c2", "network": {"id": "58e5fa41-0570-4409-8c1e-7c52fd2c7a7a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1202426316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "defbdc952aac495caf13c7cc9ead3a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214d053d-81", "ovs_interfaceid": "214d053d-815a-49bf-960e-b4ccba2c47e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1107.783725] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:db:e8:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '214d053d-815a-49bf-960e-b4ccba2c47e8', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1107.791259] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Creating folder: Project (defbdc952aac495caf13c7cc9ead3a53). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1107.791774] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c1db6dc6-4c9f-4842-aead-16e4740c36a8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.804153] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Created folder: Project (defbdc952aac495caf13c7cc9ead3a53) in parent group-v692426. [ 1107.804341] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Creating folder: Instances. Parent ref: group-v692494. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1107.804562] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d93acab4-332b-4854-945f-4ca810b0df72 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.813519] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Created folder: Instances in parent group-v692494. [ 1107.813714] env[68194]: DEBUG oslo.service.loopingcall [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1107.813889] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1107.814088] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51b7afb1-a949-4bd1-b368-003f3ad6a66a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1107.831918] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1107.831918] env[68194]: value = "task-3466852" [ 1107.831918] env[68194]: _type = "Task" [ 1107.831918] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1107.840192] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466852, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.341412] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466852, 'name': CreateVM_Task, 'duration_secs': 0.292993} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1108.341587] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1108.342261] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1108.342430] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1108.342761] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1108.343019] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-c3d11a96-5e1c-475e-9947-df100f78b3b9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1108.347488] env[68194]: DEBUG oslo_vmware.api [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for the task: (returnval){ [ 1108.347488] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52f59b7b-49bc-5fe5-6bc6-f96be44a3714" [ 1108.347488] env[68194]: _type = "Task" [ 1108.347488] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1108.354996] env[68194]: DEBUG oslo_vmware.api [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52f59b7b-49bc-5fe5-6bc6-f96be44a3714, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1108.858312] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1108.858638] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1108.858766] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1109.505702] env[68194]: DEBUG nova.compute.manager [req-430017ce-0ce6-4d9b-8d08-fc0463401d7a req-5eb5acf7-a6b5-4d03-9ea8-95c66ee5c498 service nova] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Received event network-changed-214d053d-815a-49bf-960e-b4ccba2c47e8 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1109.505907] env[68194]: DEBUG nova.compute.manager [req-430017ce-0ce6-4d9b-8d08-fc0463401d7a req-5eb5acf7-a6b5-4d03-9ea8-95c66ee5c498 service nova] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Refreshing instance network info cache due to event network-changed-214d053d-815a-49bf-960e-b4ccba2c47e8. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1109.506142] env[68194]: DEBUG oslo_concurrency.lockutils [req-430017ce-0ce6-4d9b-8d08-fc0463401d7a req-5eb5acf7-a6b5-4d03-9ea8-95c66ee5c498 service nova] Acquiring lock "refresh_cache-47cc57d7-40db-4a19-a983-f4e9ea9e8984" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1109.506288] env[68194]: DEBUG oslo_concurrency.lockutils [req-430017ce-0ce6-4d9b-8d08-fc0463401d7a req-5eb5acf7-a6b5-4d03-9ea8-95c66ee5c498 service nova] Acquired lock "refresh_cache-47cc57d7-40db-4a19-a983-f4e9ea9e8984" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1109.506449] env[68194]: DEBUG nova.network.neutron [req-430017ce-0ce6-4d9b-8d08-fc0463401d7a req-5eb5acf7-a6b5-4d03-9ea8-95c66ee5c498 service nova] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Refreshing network info cache for port 214d053d-815a-49bf-960e-b4ccba2c47e8 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1109.758712] env[68194]: DEBUG nova.network.neutron [req-430017ce-0ce6-4d9b-8d08-fc0463401d7a req-5eb5acf7-a6b5-4d03-9ea8-95c66ee5c498 service nova] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Updated VIF entry in instance network info cache for port 214d053d-815a-49bf-960e-b4ccba2c47e8. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1109.758778] env[68194]: DEBUG nova.network.neutron [req-430017ce-0ce6-4d9b-8d08-fc0463401d7a req-5eb5acf7-a6b5-4d03-9ea8-95c66ee5c498 service nova] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Updating instance_info_cache with network_info: [{"id": "214d053d-815a-49bf-960e-b4ccba2c47e8", "address": "fa:16:3e:db:e8:c2", "network": {"id": "58e5fa41-0570-4409-8c1e-7c52fd2c7a7a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1202426316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "defbdc952aac495caf13c7cc9ead3a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap214d053d-81", "ovs_interfaceid": "214d053d-815a-49bf-960e-b4ccba2c47e8", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1109.769534] env[68194]: DEBUG oslo_concurrency.lockutils [req-430017ce-0ce6-4d9b-8d08-fc0463401d7a req-5eb5acf7-a6b5-4d03-9ea8-95c66ee5c498 service nova] Releasing lock "refresh_cache-47cc57d7-40db-4a19-a983-f4e9ea9e8984" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1111.539544] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 
tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1114.416438] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1114.416438] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1115.416726] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1115.416995] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.415904] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.416233] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1116.431056] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1116.431421] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1116.431466] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1116.431625] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1116.433241] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-55fd8453-8bbf-4f91-85cd-3357b04e8094 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.442633] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc47ac2a-4a2b-41d5-bbf8-6b85d97c46cd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.457634] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-03a0e001-89e8-47b0-827d-563a5518470c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.464928] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e90e7efe-6ff3-4fe4-81fa-eb27fdc4d259 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1116.494746] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180951MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1116.494911] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1116.495131] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1116.575786] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance e575e1c7-7f35-41de-96e7-0771a4137bf5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.575950] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fe79ae03-c408-4d18-914e-e64065998663 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.576104] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.576233] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.576355] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.576474] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.576594] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.576711] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.576826] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.576942] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1116.589302] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d42d0fa9-08f2-40d9-958f-775e55fb0ea1 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.602399] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.614557] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.625659] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f141f989-f0c8-4943-982d-adf499342ec3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.638971] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 811aeb6b-eb94-4618-a0da-f391184cbd70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.652022] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 554a4ee3-092b-443a-99fc-63d9d753c8ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.695116] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b5596341-1994-4187-a240-7e02d1534ea0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.708901] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 60be602e-d9e3-4f0f-972e-e19acbb3813e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.722163] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ef28d606-6b14-4fd1-9768-4dfc90b06fd5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.735851] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ec03aeaf-11e6-456f-b408-77557f77645b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.748805] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1ef2d43f-9e6f-4354-91c5-9e1155c2a382 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.760810] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 6ee32532-e88c-4eb6-9e3f-c1ea42a4d560 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.772465] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.783565] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 939645a5-ef9a-4951-ada2-6bd95cec173f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.795055] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 9a4d19c2-79b2-4323-a68a-6ba2c82e4d13 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.806524] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance de2cd600-0b9a-45f2-a0e7-b78dfd52d0f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.818111] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4982c984-c85f-4c23-b643-9ad8a7a4f405 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.831089] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3f7dc638-fa6b-40b0-90dc-b4ddf0b2b99b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.843248] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4635c03b-1415-4ad9-8825-8997c68ad9e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1116.843518] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1116.843677] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1117.226093] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-82df3367-2c15-4cc1-817c-783ef86e8260 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.234591] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3298e775-3445-4f0e-9ddf-8ffebf7b5e9d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.266931] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec108161-585c-4dd2-960e-6a0fe1fda2a7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.274949] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e6fdf254-2f49-4e1a-b323-b1bf44cf8f6f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1117.289098] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1117.297950] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1117.313176] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1117.313389] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.818s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1118.313883] env[68194]: DEBUG oslo_service.periodic_task [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1118.314166] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1119.411675] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1120.416318] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1120.416629] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1120.416629] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1120.440356] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.440518] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: fe79ae03-c408-4d18-914e-e64065998663] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.440656] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.440789] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.440921] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.441070] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.441196] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.441318] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.441438] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.441558] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1120.441689] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1150.499573] env[68194]: WARNING oslo_vmware.rw_handles [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1150.499573] env[68194]: ERROR oslo_vmware.rw_handles [ 1150.500262] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to 
vmware_temp/cf2f5aa2-bd2b-4777-8074-cd83db0ac94e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1150.502434] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1150.502710] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Copying Virtual Disk [datastore1] vmware_temp/cf2f5aa2-bd2b-4777-8074-cd83db0ac94e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/cf2f5aa2-bd2b-4777-8074-cd83db0ac94e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1150.503042] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-7e7d2af0-a621-419b-88da-070db99d9831 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1150.511815] env[68194]: DEBUG oslo_vmware.api [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Waiting for the task: (returnval){ [ 1150.511815] env[68194]: value = "task-3466853" [ 1150.511815] env[68194]: _type = "Task" [ 1150.511815] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1150.520987] env[68194]: DEBUG oslo_vmware.api [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Task: {'id': task-3466853, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.022331] env[68194]: DEBUG oslo_vmware.exceptions [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1151.022654] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1151.023232] env[68194]: ERROR nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1151.023232] env[68194]: Faults: ['InvalidArgument'] [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Traceback (most recent call last): [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] yield resources [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] self.driver.spawn(context, instance, image_meta, [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] self._fetch_image_if_missing(context, vi) [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] image_cache(vi, tmp_image_ds_loc) [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] vm_util.copy_virtual_disk( [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] session._wait_for_task(vmdk_copy_task) [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] return self.wait_for_task(task_ref) [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] return evt.wait() [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] result = hub.switch() [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] return self.greenlet.switch() [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] self.f(*self.args, **self.kw) [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] raise exceptions.translate_fault(task_info.error) [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Faults: ['InvalidArgument'] [ 1151.023232] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] [ 1151.024113] env[68194]: INFO nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Terminating instance [ 1151.025170] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1151.025413] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1151.025664] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-9d201cd2-5437-47e8-ab6d-c9946908a88d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.028255] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1151.028391] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1151.029124] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1234f860-3a93-42c2-a07a-4b39fcd74c2c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.036084] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1151.036335] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-9d89308d-35b2-429c-aa0c-4cea5e89791e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.038724] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1151.039964] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1151.039964] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-37d9d176-bc95-49c3-81b6-0ac9c49495ee {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.045031] env[68194]: DEBUG oslo_vmware.api [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Waiting for the task: (returnval){ [ 1151.045031] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]525104ba-bfea-c311-f68e-44a4fde76133" [ 1151.045031] env[68194]: _type = "Task" [ 1151.045031] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.056379] env[68194]: DEBUG oslo_vmware.api [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]525104ba-bfea-c311-f68e-44a4fde76133, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.109994] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1151.110206] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1151.110250] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Deleting the datastore file [datastore1] e575e1c7-7f35-41de-96e7-0771a4137bf5 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1151.110499] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-016a4c9e-453a-4545-a797-0f16a0829430 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.116072] env[68194]: DEBUG oslo_vmware.api [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Waiting for the task: (returnval){ [ 1151.116072] env[68194]: value = "task-3466855" [ 1151.116072] env[68194]: _type = "Task" [ 1151.116072] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1151.123813] env[68194]: DEBUG oslo_vmware.api [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Task: {'id': task-3466855, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1151.555465] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1151.555767] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Creating directory with path [datastore1] vmware_temp/a2c065a0-0f93-4e2d-8aa8-3be00087151d/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1151.555956] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-189bcbdf-37dc-4557-b276-7c198e4ab5a6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.568561] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Created directory with path [datastore1] vmware_temp/a2c065a0-0f93-4e2d-8aa8-3be00087151d/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1151.568748] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Fetch image to [datastore1] vmware_temp/a2c065a0-0f93-4e2d-8aa8-3be00087151d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1151.568915] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/a2c065a0-0f93-4e2d-8aa8-3be00087151d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1151.569702] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd21b1c5-c94e-4a0d-b41f-7a0fa32b719d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.577916] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d9cbabf1-0d7c-4b47-8f76-cfc92f3113d5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.586678] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d203b09f-3dd1-4e4c-beb2-407fee336a06 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.622967] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-16152e6b-2e7b-4341-b28f-70017bbe9a54 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.631760] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2860cccb-d440-4285-b4c7-75e1185f4b35 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1151.633462] env[68194]: DEBUG oslo_vmware.api [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Task: {'id': task-3466855, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.072724} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1151.633682] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1151.633868] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1151.634049] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1151.634223] env[68194]: INFO nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1151.636633] env[68194]: DEBUG nova.compute.claims [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1151.636807] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1151.637027] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1151.655488] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1151.727497] env[68194]: DEBUG oslo_vmware.rw_handles [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a2c065a0-0f93-4e2d-8aa8-3be00087151d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1151.794238] env[68194]: DEBUG oslo_vmware.rw_handles [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1151.794238] env[68194]: DEBUG oslo_vmware.rw_handles [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a2c065a0-0f93-4e2d-8aa8-3be00087151d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1152.185880] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-149d66e7-5ac8-496f-8e0a-deb521798aba {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.194300] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-217a4cbe-2467-4e9f-9ad5-d87484662751 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.225281] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65553fea-ce43-4776-84e9-465e33adc205 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.232519] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a3c296-305a-4cb2-ae21-2c176132cb5a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.246291] env[68194]: DEBUG nova.compute.provider_tree [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1152.255608] env[68194]: DEBUG nova.scheduler.client.report [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1152.272528] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.635s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1152.272978] env[68194]: ERROR nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1152.272978] env[68194]: Faults: ['InvalidArgument'] [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Traceback (most recent call last): [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance 
[ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] self.driver.spawn(context, instance, image_meta, [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] self._fetch_image_if_missing(context, vi) [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] image_cache(vi, tmp_image_ds_loc) [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] vm_util.copy_virtual_disk( [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] session._wait_for_task(vmdk_copy_task) [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] return self.wait_for_task(task_ref) [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] return evt.wait() [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] result = hub.switch() [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] return self.greenlet.switch() [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] self.f(*self.args, **self.kw) [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: 
e575e1c7-7f35-41de-96e7-0771a4137bf5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] raise exceptions.translate_fault(task_info.error) [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Faults: ['InvalidArgument'] [ 1152.272978] env[68194]: ERROR nova.compute.manager [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] [ 1152.273897] env[68194]: DEBUG nova.compute.utils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1152.275109] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Build of instance e575e1c7-7f35-41de-96e7-0771a4137bf5 was re-scheduled: A specified parameter was not correct: fileType [ 1152.275109] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1152.275502] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1152.275690] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1152.277206] env[68194]: DEBUG nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1152.277206] env[68194]: DEBUG nova.network.neutron [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1152.618476] env[68194]: DEBUG nova.network.neutron [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.630673] env[68194]: INFO nova.compute.manager [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Took 0.35 seconds to deallocate network for instance. [ 1152.753730] env[68194]: INFO nova.scheduler.client.report [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Deleted allocations for instance e575e1c7-7f35-41de-96e7-0771a4137bf5 [ 1152.778704] env[68194]: DEBUG oslo_concurrency.lockutils [None req-bc2e865c-b91b-4466-bde0-8d16f625b70e tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "e575e1c7-7f35-41de-96e7-0771a4137bf5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 516.579s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1152.779867] env[68194]: DEBUG oslo_concurrency.lockutils [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "e575e1c7-7f35-41de-96e7-0771a4137bf5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 317.791s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1152.780098] env[68194]: DEBUG oslo_concurrency.lockutils [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Acquiring lock "e575e1c7-7f35-41de-96e7-0771a4137bf5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1152.780901] env[68194]: DEBUG oslo_concurrency.lockutils [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "e575e1c7-7f35-41de-96e7-0771a4137bf5-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1152.781272] env[68194]: DEBUG oslo_concurrency.lockutils [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "e575e1c7-7f35-41de-96e7-0771a4137bf5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1152.783301] env[68194]: INFO nova.compute.manager [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Terminating instance [ 1152.784841] env[68194]: DEBUG nova.compute.manager [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1152.785054] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1152.785511] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-362f5a0b-d837-42a1-9a5e-5aae2ed7f64d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.798115] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16cc5673-e344-4dee-adf0-54f133e8d313 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1152.810276] env[68194]: DEBUG nova.compute.manager [None req-263ea940-b565-4867-a31f-26ebfff91aa9 tempest-ServerAddressesTestJSON-524673289 tempest-ServerAddressesTestJSON-524673289-project-member] [instance: d42d0fa9-08f2-40d9-958f-775e55fb0ea1] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1152.833035] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance e575e1c7-7f35-41de-96e7-0771a4137bf5 could not be found. 
[ 1152.833117] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1152.833276] env[68194]: INFO nova.compute.manager [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1152.833524] env[68194]: DEBUG oslo.service.loopingcall [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1152.834785] env[68194]: DEBUG nova.compute.manager [-] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1152.834785] env[68194]: DEBUG nova.network.neutron [-] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1152.840317] env[68194]: DEBUG nova.compute.manager [None req-263ea940-b565-4867-a31f-26ebfff91aa9 tempest-ServerAddressesTestJSON-524673289 tempest-ServerAddressesTestJSON-524673289-project-member] [instance: d42d0fa9-08f2-40d9-958f-775e55fb0ea1] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1152.860101] env[68194]: DEBUG nova.network.neutron [-] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1152.867631] env[68194]: DEBUG oslo_concurrency.lockutils [None req-263ea940-b565-4867-a31f-26ebfff91aa9 tempest-ServerAddressesTestJSON-524673289 tempest-ServerAddressesTestJSON-524673289-project-member] Lock "d42d0fa9-08f2-40d9-958f-775e55fb0ea1" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 230.470s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1152.871741] env[68194]: INFO nova.compute.manager [-] [instance: e575e1c7-7f35-41de-96e7-0771a4137bf5] Took 0.04 seconds to deallocate network for instance. [ 1152.878041] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1152.940547] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1152.940826] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1152.942705] env[68194]: INFO nova.compute.claims [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1153.023248] env[68194]: DEBUG oslo_concurrency.lockutils [None req-317cc521-fe06-41e6-969b-f99010a44bca tempest-InstanceActionsV221TestJSON-540488062 tempest-InstanceActionsV221TestJSON-540488062-project-member] Lock "e575e1c7-7f35-41de-96e7-0771a4137bf5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.243s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1153.329705] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec0486e3-2edb-4055-b808-639b0e7dcbaa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.339015] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c61abe0a-1840-4620-9085-be3aeba8e05e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.371324] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1763f1eb-3c9d-4900-a28b-ec9df7045779 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.378868] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e42c1d8b-1f5c-4b88-9759-3fb50ab0b4d4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.393625] env[68194]: DEBUG nova.compute.provider_tree [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1153.402236] env[68194]: DEBUG nova.scheduler.client.report [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 
'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1153.417096] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.475s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1153.417451] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1153.462282] env[68194]: DEBUG nova.compute.utils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1153.462282] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Not allocating networking since 'none' was specified. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 1153.470222] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1153.546689] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1153.574968] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1153.575229] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1153.575390] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1153.575569] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1153.575717] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1153.575877] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1153.576180] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1153.576355] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1153.576527] env[68194]: DEBUG nova.virt.hardware [None 
req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1153.576694] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1153.576862] env[68194]: DEBUG nova.virt.hardware [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1153.577744] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a353dc2-0fb6-41b6-bb81-d736b5dd99e3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.586397] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfc136ca-df99-4ff6-b17f-a6ab84098062 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.606388] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Instance VIF info [] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1153.611912] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Creating folder: Project (aa2d478b7bbe42328ab0d3c6057d6be0). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1153.612200] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-157474af-4989-4248-b4a7-d7e73eab5752 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.621933] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Created folder: Project (aa2d478b7bbe42328ab0d3c6057d6be0) in parent group-v692426. [ 1153.622159] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Creating folder: Instances. Parent ref: group-v692497. 
{{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1153.622386] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-32907436-d1d4-4e7a-90b4-2c598bda4b57 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.630480] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Created folder: Instances in parent group-v692497. [ 1153.630700] env[68194]: DEBUG oslo.service.loopingcall [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1153.630878] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1153.631072] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-8239895e-b1e0-4167-bfa9-9a3eb76e6620 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1153.646726] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1153.646726] env[68194]: value = "task-3466858" [ 1153.646726] env[68194]: _type = "Task" [ 1153.646726] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1153.653727] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466858, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.161091] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466858, 'name': CreateVM_Task, 'duration_secs': 0.297511} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1154.161091] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1154.161091] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1154.161091] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1154.161091] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1154.161091] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7a4d15cd-ba56-492d-965c-216e0fb96878 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1154.166052] env[68194]: DEBUG oslo_vmware.api [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Waiting for the task: (returnval){ [ 1154.166052] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52c20d78-853e-be43-8f73-cb6b6ffe42e9" [ 1154.166052] env[68194]: _type = "Task" [ 1154.166052] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1154.172803] env[68194]: DEBUG oslo_vmware.api [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52c20d78-853e-be43-8f73-cb6b6ffe42e9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1154.676839] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1154.676839] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1154.676839] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1155.403081] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1158.283029] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquiring lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1158.283029] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1174.416887] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1174.417182] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1175.416609] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.416904] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.417143] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1175.432137] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] There are 1 instances to clean {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1175.432427] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: a6920ad4-bf1c-4daa-9b9a-81e782c88a20] Instance has had 0 of 5 cleanup attempts {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11211}} [ 1175.469623] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1175.469808] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances with incomplete migration {{(pid=68194) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1176.416720] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1176.452332] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1177.426826] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1178.416451] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1178.416708] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1178.416872] env[68194]: DEBUG oslo_service.periodic_task [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1178.429108] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1178.429427] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1178.429619] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1178.429782] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1178.431289] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-737d32e4-b1ad-45a0-8993-ba86172a0953 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.441531] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a87c781-8cb7-4540-8898-3d15036af26b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.456221] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-189f3834-8332-4f45-89b0-1b156a7f2a56 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.462843] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4aa20ac0-7a3d-411a-a4eb-4ba5397f98ac {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1178.493009] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180970MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1178.493187] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1178.493415] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1178.633855] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fe79ae03-c408-4d18-914e-e64065998663 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.634123] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.634267] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.634397] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.634519] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.634644] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.634763] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.634881] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.634996] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.635125] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1178.647024] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.656489] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance f141f989-f0c8-4943-982d-adf499342ec3 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.665814] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 811aeb6b-eb94-4618-a0da-f391184cbd70 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 192, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.675703] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 554a4ee3-092b-443a-99fc-63d9d753c8ec has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.686168] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b5596341-1994-4187-a240-7e02d1534ea0 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.696037] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 60be602e-d9e3-4f0f-972e-e19acbb3813e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.707351] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ef28d606-6b14-4fd1-9768-4dfc90b06fd5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.717113] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ec03aeaf-11e6-456f-b408-77557f77645b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.726360] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1ef2d43f-9e6f-4354-91c5-9e1155c2a382 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.735329] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 6ee32532-e88c-4eb6-9e3f-c1ea42a4d560 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.745231] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.755191] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 939645a5-ef9a-4951-ada2-6bd95cec173f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.764419] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 9a4d19c2-79b2-4323-a68a-6ba2c82e4d13 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.773983] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance de2cd600-0b9a-45f2-a0e7-b78dfd52d0f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.782986] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4982c984-c85f-4c23-b643-9ad8a7a4f405 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.792077] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3f7dc638-fa6b-40b0-90dc-b4ddf0b2b99b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.801866] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4635c03b-1415-4ad9-8825-8997c68ad9e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.810692] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1178.810970] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1178.811162] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1178.826944] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing inventories for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1178.840850] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating ProviderTree inventory for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1178.841048] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating inventory in ProviderTree for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1178.850862] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing aggregate associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, aggregates: None {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1178.868239] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing trait associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1179.161017] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-70f27e95-31ba-472c-8f87-d1949d20edc9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.168746] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fbd9795c-fb65-4ef4-a2a5-76fdfa55a2b1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.200835] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cef8f517-4fae-403f-84d8-ae834e28df06 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.208058] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-72d2c141-8e17-4e5c-9c73-82cdefdebd09 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1179.220550] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1179.228948] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1179.242130] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1179.242314] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.749s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1180.237556] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.237893] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.416176] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1180.416392] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1180.416503] env[68194]: DEBUG nova.compute.manager [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1180.437861] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: fe79ae03-c408-4d18-914e-e64065998663] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.438154] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.438314] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.438446] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.438575] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.438698] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.438820] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.438941] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.439072] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.439194] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1180.439314] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1197.811077] env[68194]: WARNING oslo_vmware.rw_handles [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1197.811077] env[68194]: ERROR oslo_vmware.rw_handles [ 1197.811803] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/a2c065a0-0f93-4e2d-8aa8-3be00087151d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1197.813572] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1197.813828] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Copying Virtual Disk [datastore1] vmware_temp/a2c065a0-0f93-4e2d-8aa8-3be00087151d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/a2c065a0-0f93-4e2d-8aa8-3be00087151d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1197.814135] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-d334bb30-9850-4ec8-a9ff-1d9dcb97dcbe {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1197.822646] env[68194]: DEBUG oslo_vmware.api [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Waiting 
for the task: (returnval){ [ 1197.822646] env[68194]: value = "task-3466859" [ 1197.822646] env[68194]: _type = "Task" [ 1197.822646] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1197.831051] env[68194]: DEBUG oslo_vmware.api [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Task: {'id': task-3466859, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.333421] env[68194]: DEBUG oslo_vmware.exceptions [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1198.333713] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1198.334276] env[68194]: ERROR nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1198.334276] env[68194]: Faults: ['InvalidArgument'] [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] Traceback (most recent call last): [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] yield resources [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] self.driver.spawn(context, instance, image_meta, [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] self._fetch_image_if_missing(context, vi) [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1198.334276] env[68194]: ERROR nova.compute.manager 
[instance: fe79ae03-c408-4d18-914e-e64065998663] image_cache(vi, tmp_image_ds_loc) [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] vm_util.copy_virtual_disk( [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] session._wait_for_task(vmdk_copy_task) [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] return self.wait_for_task(task_ref) [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] return evt.wait() [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] result = hub.switch() [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] return self.greenlet.switch() [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] self.f(*self.args, **self.kw) [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] raise exceptions.translate_fault(task_info.error) [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] Faults: ['InvalidArgument'] [ 1198.334276] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] [ 1198.335253] env[68194]: INFO nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Terminating instance [ 1198.336626] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da 
tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1198.336626] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1198.336626] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-51766508-cdd1-4a5e-97b8-ed175b3672e2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.338972] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1198.339191] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1198.339905] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb747df2-1376-4ff1-9b16-f7aebff3ea92 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.346811] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1198.347041] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0af74c63-bc2e-4482-832b-48e81575ba06 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.349320] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1198.349494] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1198.350440] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-fc0b7665-2f64-42b9-a537-0e0bffd76808 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.354974] env[68194]: DEBUG oslo_vmware.api [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Waiting for the task: (returnval){ [ 1198.354974] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52683ebf-62db-7824-609a-fa6a3741e29f" [ 1198.354974] env[68194]: _type = "Task" [ 1198.354974] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.362109] env[68194]: DEBUG oslo_vmware.api [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52683ebf-62db-7824-609a-fa6a3741e29f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.418642] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1198.419466] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1198.419466] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Deleting the datastore file [datastore1] fe79ae03-c408-4d18-914e-e64065998663 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1198.419622] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-93d7235b-7e8b-463f-9d36-f29d3a881e56 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.426334] env[68194]: DEBUG oslo_vmware.api [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Waiting for the task: (returnval){ [ 1198.426334] env[68194]: value = "task-3466861" [ 1198.426334] env[68194]: _type = "Task" [ 1198.426334] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1198.434742] env[68194]: DEBUG oslo_vmware.api [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Task: {'id': task-3466861, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1198.865315] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1198.865642] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Creating directory with path [datastore1] vmware_temp/b08a3924-6753-4d92-b923-a8ac83de395f/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1198.865888] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5a7f433d-0751-4d16-af29-bb61946cd416 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.878398] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Created directory with path [datastore1] vmware_temp/b08a3924-6753-4d92-b923-a8ac83de395f/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1198.878646] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Fetch image to [datastore1] vmware_temp/b08a3924-6753-4d92-b923-a8ac83de395f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1198.878840] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/b08a3924-6753-4d92-b923-a8ac83de395f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1198.879626] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd679d90-12f7-4ebe-adb1-962f8d70032f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.886676] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db74f6e7-673f-4df1-8859-021382d69749 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.895878] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e8e4ae6-e2af-4de5-88d5-5d4042d743ef {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.926434] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-e88eea72-35a3-4fae-853a-3e912df0453b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.937855] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-bdf7c29c-3365-422a-996a-156209711bec {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1198.939660] env[68194]: DEBUG oslo_vmware.api [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Task: {'id': task-3466861, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073549} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1198.939946] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1198.940149] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1198.940326] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1198.940505] env[68194]: INFO nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1198.944830] env[68194]: DEBUG nova.compute.claims [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1198.945007] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1198.945220] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1198.962498] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1199.013499] env[68194]: DEBUG oslo_vmware.rw_handles [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b08a3924-6753-4d92-b923-a8ac83de395f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1199.078609] env[68194]: DEBUG oslo_vmware.rw_handles [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1199.078609] env[68194]: DEBUG oslo_vmware.rw_handles [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b08a3924-6753-4d92-b923-a8ac83de395f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1199.344146] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-622ae77a-9800-4fba-8fe8-62e9c3fadb8b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.351956] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88fa7319-ea10-4833-83d4-a38b287d6aab {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.382311] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd090984-c81d-471e-97ed-f089984b187a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.389344] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59db1c0e-053c-4602-b1f6-fc20437b1fb9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.402050] env[68194]: DEBUG nova.compute.provider_tree [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1199.410240] env[68194]: DEBUG nova.scheduler.client.report [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1199.426506] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.481s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1199.427085] env[68194]: ERROR nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1199.427085] env[68194]: Faults: ['InvalidArgument'] [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] Traceback (most recent call last): [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1199.427085] 
env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] self.driver.spawn(context, instance, image_meta, [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] self._fetch_image_if_missing(context, vi) [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] image_cache(vi, tmp_image_ds_loc) [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] vm_util.copy_virtual_disk( [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] session._wait_for_task(vmdk_copy_task) [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] return self.wait_for_task(task_ref) [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] return evt.wait() [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] result = hub.switch() [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] return self.greenlet.switch() [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] self.f(*self.args, **self.kw) [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] raise exceptions.translate_fault(task_info.error) [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] Faults: ['InvalidArgument'] [ 1199.427085] env[68194]: ERROR nova.compute.manager [instance: fe79ae03-c408-4d18-914e-e64065998663] [ 1199.428063] env[68194]: DEBUG nova.compute.utils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1199.429580] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Build of instance fe79ae03-c408-4d18-914e-e64065998663 was re-scheduled: A specified parameter was not correct: fileType [ 1199.429580] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1199.429958] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1199.430153] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1199.430339] env[68194]: DEBUG nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1199.430505] env[68194]: DEBUG nova.network.neutron [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1199.741538] env[68194]: DEBUG nova.network.neutron [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.753828] env[68194]: INFO nova.compute.manager [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Took 0.32 seconds to deallocate network for instance. [ 1199.842015] env[68194]: INFO nova.scheduler.client.report [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Deleted allocations for instance fe79ae03-c408-4d18-914e-e64065998663 [ 1199.862141] env[68194]: DEBUG oslo_concurrency.lockutils [None req-4ab3869a-dff9-451b-9673-69bb5db2e1c4 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "fe79ae03-c408-4d18-914e-e64065998663" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 549.377s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1199.862825] env[68194]: DEBUG oslo_concurrency.lockutils [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "fe79ae03-c408-4d18-914e-e64065998663" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 350.721s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1199.863061] env[68194]: DEBUG oslo_concurrency.lockutils [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Acquiring lock "fe79ae03-c408-4d18-914e-e64065998663-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1199.863986] env[68194]: DEBUG oslo_concurrency.lockutils [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "fe79ae03-c408-4d18-914e-e64065998663-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1199.863986] env[68194]: DEBUG oslo_concurrency.lockutils [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "fe79ae03-c408-4d18-914e-e64065998663-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1199.865688] env[68194]: INFO nova.compute.manager [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Terminating instance [ 1199.867933] env[68194]: DEBUG nova.compute.manager [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1199.867933] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1199.868184] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a592bfef-bbdb-43c0-94ad-a8bf9f4939b8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.878023] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f284e016-8d69-4847-9706-13786589d539 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1199.889817] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1199.910945] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance fe79ae03-c408-4d18-914e-e64065998663 could not be found. 
[ 1199.911176] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1199.911359] env[68194]: INFO nova.compute.manager [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] [instance: fe79ae03-c408-4d18-914e-e64065998663] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1199.911605] env[68194]: DEBUG oslo.service.loopingcall [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1199.911859] env[68194]: DEBUG nova.compute.manager [-] [instance: fe79ae03-c408-4d18-914e-e64065998663] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1199.911959] env[68194]: DEBUG nova.network.neutron [-] [instance: fe79ae03-c408-4d18-914e-e64065998663] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1199.939121] env[68194]: DEBUG nova.network.neutron [-] [instance: fe79ae03-c408-4d18-914e-e64065998663] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1199.945456] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1199.945745] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1199.947188] env[68194]: INFO nova.compute.claims [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1199.951167] env[68194]: INFO nova.compute.manager [-] [instance: fe79ae03-c408-4d18-914e-e64065998663] Took 0.04 seconds to deallocate network for instance. 
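The "Waiting for function ... _deallocate_network_with_retries to return" entry is oslo.service's looping-call helper driving a retry loop around network deallocation. A small sketch of that retry pattern, with a hypothetical deallocate callable standing in for the real cleanup:

    from oslo_service import loopingcall

    def deallocate_with_retries(deallocate, max_attempts=3, interval=1.0):
        attempts = {"count": 0}

        def _run():
            attempts["count"] += 1
            try:
                deallocate()
            except Exception:
                if attempts["count"] >= max_attempts:
                    raise                 # propagates out through .wait()
                return                    # retry on the next interval
            # Stop the loop and hand the result back to .wait().
            raise loopingcall.LoopingCallDone(True)

        timer = loopingcall.FixedIntervalLoopingCall(_run)
        return timer.start(interval=interval).wait()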
[ 1200.059707] env[68194]: DEBUG oslo_concurrency.lockutils [None req-74f5b357-fa5c-4e70-9941-f4b53c4a7901 tempest-ServersNegativeTestJSON-1254573690 tempest-ServersNegativeTestJSON-1254573690-project-member] Lock "fe79ae03-c408-4d18-914e-e64065998663" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1200.299958] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-890861c1-7766-40cd-bbd1-f4a6762b2479 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.307111] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1da138b-098c-4dd3-aff8-adb52b9479b2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.347098] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3063d208-f39d-40bc-9227-b72ba60ca641 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.354637] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60c85b0a-8dae-4da6-9fbf-f2eb86c535b2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.369123] env[68194]: DEBUG nova.compute.provider_tree [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1200.377920] env[68194]: DEBUG nova.scheduler.client.report [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1200.392879] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.447s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1200.393435] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1200.425320] env[68194]: DEBUG nova.compute.utils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1200.427439] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1200.427439] env[68194]: DEBUG nova.network.neutron [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1200.436938] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1200.491075] env[68194]: DEBUG nova.policy [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'f138c432652648a8aae62855c4a32ee7', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '338d06a759af43d3a3c326cb627953e3', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1200.507144] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1200.534536] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1200.534536] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1200.534536] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1200.534736] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1200.534832] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1200.534959] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1200.535191] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1200.535378] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} 
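The nova.virt.hardware entries above show the driver taking a 0:0:0 preferred topology (no preference) and 65536:65536:65536 maxima, then building candidate topologies for the flavor's single vCPU. A simplified illustration of that enumeration step, factoring the vCPU count into sockets*cores*threads combinations within the maxima; this sketches the idea only and is not nova.virt.hardware itself:

    from collections import namedtuple

    Topology = namedtuple("Topology", "sockets cores threads")

    def possible_topologies(vcpus, max_sockets, max_cores, max_threads):
        # Enumerate sockets * cores * threads factorizations equal to vcpus.
        found = []
        for sockets in range(1, min(vcpus, max_sockets) + 1):
            if vcpus % sockets:
                continue
            per_socket = vcpus // sockets
            for cores in range(1, min(per_socket, max_cores) + 1):
                if per_socket % cores:
                    continue
                threads = per_socket // cores
                if threads <= max_threads:
                    found.append(Topology(sockets, cores, threads))
        return found

    # For the 1-vCPU m1.nano flavor in the log the only candidate is 1:1:1,
    # matching the "Got 1 possible topologies" entry that follows.
    assert possible_topologies(1, 65536, 65536, 65536) == [Topology(1, 1, 1)]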
[ 1200.535522] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1200.535685] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1200.535857] env[68194]: DEBUG nova.virt.hardware [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1200.536723] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3a16fa7-c0f3-4871-82cb-959c5ef4559b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.544724] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65c29f3d-e5ee-4453-bdaf-d19b0c35e1a8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1200.781526] env[68194]: DEBUG nova.network.neutron [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Successfully created port: c208d26b-2f88-4212-b954-49eab5537f6e {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1201.362467] env[68194]: DEBUG nova.network.neutron [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Successfully updated port: c208d26b-2f88-4212-b954-49eab5537f6e {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1201.372517] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "refresh_cache-bcb53c97-8d95-4d67-b310-d19087b0b298" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1201.372669] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquired lock "refresh_cache-bcb53c97-8d95-4d67-b310-d19087b0b298" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1201.372819] env[68194]: DEBUG nova.network.neutron [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1201.415785] 
env[68194]: DEBUG nova.network.neutron [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1201.574547] env[68194]: DEBUG nova.network.neutron [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Updating instance_info_cache with network_info: [{"id": "c208d26b-2f88-4212-b954-49eab5537f6e", "address": "fa:16:3e:cc:6f:fe", "network": {"id": "66c67216-09df-45c9-b2bc-5ae0bd4b26a2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1055552377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "338d06a759af43d3a3c326cb627953e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc208d26b-2f", "ovs_interfaceid": "c208d26b-2f88-4212-b954-49eab5537f6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1201.588056] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Releasing lock "refresh_cache-bcb53c97-8d95-4d67-b310-d19087b0b298" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1201.588056] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Instance network_info: |[{"id": "c208d26b-2f88-4212-b954-49eab5537f6e", "address": "fa:16:3e:cc:6f:fe", "network": {"id": "66c67216-09df-45c9-b2bc-5ae0bd4b26a2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1055552377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "338d06a759af43d3a3c326cb627953e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, 
"devname": "tapc208d26b-2f", "ovs_interfaceid": "c208d26b-2f88-4212-b954-49eab5537f6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1201.588271] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:cc:6f:fe', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2be3fdb5-359e-43bd-8c20-2ff00e81db55', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'c208d26b-2f88-4212-b954-49eab5537f6e', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1201.595453] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Creating folder: Project (338d06a759af43d3a3c326cb627953e3). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1201.595950] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-dd791909-8e63-4b64-97e3-636699315e5e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.606752] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Created folder: Project (338d06a759af43d3a3c326cb627953e3) in parent group-v692426. [ 1201.606910] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Creating folder: Instances. Parent ref: group-v692500. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1201.607137] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-d26e24e1-b3a5-466f-8270-b792353427ef {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.615327] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Created folder: Instances in parent group-v692500. [ 1201.615550] env[68194]: DEBUG oslo.service.loopingcall [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1201.615718] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1201.615895] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-4657043c-80e2-45ca-be4b-d3842714db79 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1201.634097] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1201.634097] env[68194]: value = "task-3466864" [ 1201.634097] env[68194]: _type = "Task" [ 1201.634097] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1201.641194] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466864, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1201.837121] env[68194]: DEBUG nova.compute.manager [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Received event network-vif-plugged-c208d26b-2f88-4212-b954-49eab5537f6e {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1201.837359] env[68194]: DEBUG oslo_concurrency.lockutils [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] Acquiring lock "bcb53c97-8d95-4d67-b310-d19087b0b298-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1201.837611] env[68194]: DEBUG oslo_concurrency.lockutils [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1201.837772] env[68194]: DEBUG oslo_concurrency.lockutils [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1201.837942] env[68194]: DEBUG nova.compute.manager [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] No waiting events found dispatching network-vif-plugged-c208d26b-2f88-4212-b954-49eab5537f6e {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1201.838128] env[68194]: WARNING nova.compute.manager [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Received unexpected event network-vif-plugged-c208d26b-2f88-4212-b954-49eab5537f6e for instance with vm_state building and task_state spawning. 
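The network-vif-plugged entries show Neutron notifying the compute service of an external event and the manager finding no registered waiter, hence the WARNING about an unexpected event while the instance is still building. A minimal model of that prepare/dispatch handshake using plain threading primitives; the class and method names are illustrative rather than Nova's InstanceEvents API:

    import threading

    class EventWaiters:
        # Track per-instance events a caller may start waiting on before
        # the notification actually arrives.
        def __init__(self):
            self._lock = threading.Lock()
            self._events = {}  # (instance_uuid, event_name) -> threading.Event

        def prepare(self, instance_uuid, event_name):
            # Register interest and return the object the caller will wait on.
            with self._lock:
                return self._events.setdefault((instance_uuid, event_name),
                                               threading.Event())

        def dispatch(self, instance_uuid, event_name):
            # Deliver an event; False mirrors the "unexpected event" warning.
            with self._lock:
                waiter = self._events.pop((instance_uuid, event_name), None)
            if waiter is None:
                return False
            waiter.set()
            return True

    waiters = EventWaiters()
    pending = waiters.prepare("bcb53c97", "network-vif-plugged")
    waiters.dispatch("bcb53c97", "network-vif-plugged")
    pending.wait(timeout=1)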
[ 1201.838293] env[68194]: DEBUG nova.compute.manager [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Received event network-changed-c208d26b-2f88-4212-b954-49eab5537f6e {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1201.838501] env[68194]: DEBUG nova.compute.manager [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Refreshing instance network info cache due to event network-changed-c208d26b-2f88-4212-b954-49eab5537f6e. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1201.838743] env[68194]: DEBUG oslo_concurrency.lockutils [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] Acquiring lock "refresh_cache-bcb53c97-8d95-4d67-b310-d19087b0b298" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1201.838835] env[68194]: DEBUG oslo_concurrency.lockutils [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] Acquired lock "refresh_cache-bcb53c97-8d95-4d67-b310-d19087b0b298" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1201.838991] env[68194]: DEBUG nova.network.neutron [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Refreshing network info cache for port c208d26b-2f88-4212-b954-49eab5537f6e {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1202.092121] env[68194]: DEBUG nova.network.neutron [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Updated VIF entry in instance network info cache for port c208d26b-2f88-4212-b954-49eab5537f6e. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1202.092477] env[68194]: DEBUG nova.network.neutron [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Updating instance_info_cache with network_info: [{"id": "c208d26b-2f88-4212-b954-49eab5537f6e", "address": "fa:16:3e:cc:6f:fe", "network": {"id": "66c67216-09df-45c9-b2bc-5ae0bd4b26a2", "bridge": "br-int", "label": "tempest-ListServerFiltersTestJSON-1055552377-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "338d06a759af43d3a3c326cb627953e3", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2be3fdb5-359e-43bd-8c20-2ff00e81db55", "external-id": "nsx-vlan-transportzone-986", "segmentation_id": 986, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapc208d26b-2f", "ovs_interfaceid": "c208d26b-2f88-4212-b954-49eab5537f6e", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1202.101405] env[68194]: DEBUG oslo_concurrency.lockutils [req-ca78bbb5-b584-493e-b136-9e07a0cdaa68 req-c07f58ae-3083-4bb4-8848-e5a762fec21c service nova] Releasing lock "refresh_cache-bcb53c97-8d95-4d67-b310-d19087b0b298" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1202.145955] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466864, 'name': CreateVM_Task, 'duration_secs': 0.294703} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1202.145955] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1202.146207] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1202.146346] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1202.146731] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1202.146900] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a04400b0-0c31-4a82-bd0a-ac12247fc642 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1202.151323] env[68194]: DEBUG oslo_vmware.api [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Waiting for the task: (returnval){ [ 1202.151323] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]528b5130-1598-4bb6-6c3b-9bfab795ed2f" [ 1202.151323] env[68194]: _type = "Task" [ 1202.151323] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1202.158805] env[68194]: DEBUG oslo_vmware.api [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]528b5130-1598-4bb6-6c3b-9bfab795ed2f, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1202.660786] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1202.661080] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1202.661362] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1211.418442] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_power_states {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1211.444229] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Getting list of instances from cluster (obj){ [ 1211.444229] env[68194]: value = "domain-c8" [ 1211.444229] env[68194]: _type = "ClusterComputeResource" [ 1211.444229] env[68194]: } {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1211.445559] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25412845-66c8-4058-a369-a4d1c664ef93 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1211.462793] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Got total of 10 instances {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1211.462980] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid cef5dc8e-1a5c-4248-9bac-ff25880588ed {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.463189] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 2243c245-bbb3-43b7-89a9-fb727d452885 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.463350] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 108001a3-ff36-475b-a7a5-8e0e197c62a8 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.463504] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid d2e2cf0b-1028-4df3-9170-dc616a04fdc3 {{(pid=68194) _sync_power_states 
/opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.463660] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid ff16d7c1-a601-4ac6-be52-823727c8b843 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.463812] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 20f4ed05-ee86-416b-8bf7-d446d33bab6f {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.463963] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid ce0f1886-189f-4ab3-9ed6-376dce542f5f {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.464130] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 47cc57d7-40db-4a19-a983-f4e9ea9e8984 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.464283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 7ed8ac34-04a2-49fe-9429-f636ff6fff8a {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.464431] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid bcb53c97-8d95-4d67-b310-d19087b0b298 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1211.464750] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1211.465432] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "2243c245-bbb3-43b7-89a9-fb727d452885" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1211.465432] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "108001a3-ff36-475b-a7a5-8e0e197c62a8" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1211.465432] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1211.465598] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "ff16d7c1-a601-4ac6-be52-823727c8b843" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1211.465829] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1211.465967] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1211.466178] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1211.466367] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1211.466558] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "bcb53c97-8d95-4d67-b310-d19087b0b298" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1216.521798] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1216.521798] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1219.968268] env[68194]: DEBUG oslo_concurrency.lockutils [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "bcb53c97-8d95-4d67-b310-d19087b0b298" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1225.720000] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquiring lock "bf9766c7-1495-4edd-92bd-06a0d036855e" by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1225.720272] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "bf9766c7-1495-4edd-92bd-06a0d036855e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1231.311634] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquiring lock "7b430b72-05fa-49a6-8bbb-7c083cb96457" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1231.311944] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1234.419602] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1234.419946] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1235.416834] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1238.416229] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1239.415935] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1239.416212] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1239.897120] env[68194]: DEBUG oslo_concurrency.lockutils [None req-526474d6-d4b8-43ce-901d-e1604e91dc8f tempest-ServerRescueTestJSONUnderV235-1403302761 tempest-ServerRescueTestJSONUnderV235-1403302761-project-member] Acquiring lock "ada24904-c85b-4af9-be4c-afc8514b7307" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1239.898154] env[68194]: DEBUG oslo_concurrency.lockutils [None req-526474d6-d4b8-43ce-901d-e1604e91dc8f tempest-ServerRescueTestJSONUnderV235-1403302761 tempest-ServerRescueTestJSONUnderV235-1403302761-project-member] Lock "ada24904-c85b-4af9-be4c-afc8514b7307" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1240.411327] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1240.416378] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1240.428858] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1240.429069] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1240.429244] 
env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1240.429400] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1240.430540] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c7dd7b8-e5dc-4141-9677-240198ed562d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.439350] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac08a192-0778-430a-bbb2-277ca9e79317 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.453069] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f603313-1064-4e13-995e-faafc58cb1c0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.459215] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f66d693e-48ca-4af5-926c-07e45c2ae7e2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.487486] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180964MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1240.487651] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1240.487844] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1240.562307] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.562468] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2243c245-bbb3-43b7-89a9-fb727d452885 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.562594] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.562716] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.562835] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.562951] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.563083] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.563203] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.563318] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.563430] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1240.574175] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1ef2d43f-9e6f-4354-91c5-9e1155c2a382 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.584033] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 6ee32532-e88c-4eb6-9e3f-c1ea42a4d560 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.593487] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.602747] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 939645a5-ef9a-4951-ada2-6bd95cec173f has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.611520] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 9a4d19c2-79b2-4323-a68a-6ba2c82e4d13 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.620394] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance de2cd600-0b9a-45f2-a0e7-b78dfd52d0f7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.629424] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4982c984-c85f-4c23-b643-9ad8a7a4f405 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.638176] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3f7dc638-fa6b-40b0-90dc-b4ddf0b2b99b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.646937] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4635c03b-1415-4ad9-8825-8997c68ad9e7 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.655972] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.665255] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.674189] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.682560] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.690719] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ada24904-c85b-4af9-be4c-afc8514b7307 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1240.691019] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1240.691172] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1240.934481] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-55710b53-6f6d-41bf-8aaa-5d28a8309c2f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.943394] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7abf8ee8-8152-43de-9442-bdc2c9a8c0ca {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.973239] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2f6969c1-cd80-47d8-bf19-1e3fa145f41b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.980138] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-38912dc8-c87e-443f-8517-b3653dfec37a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1240.992666] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1241.001016] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1241.016499] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1241.016683] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.529s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1242.016934] env[68194]: DEBUG oslo_service.periodic_task [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1242.017342] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1242.017342] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1242.039606] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.039781] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.039904] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.040046] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.040178] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.040303] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.040425] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.040545] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.040663] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.040782] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1242.040904] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1242.041431] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1244.603502] env[68194]: DEBUG oslo_concurrency.lockutils [None req-99b934d3-4269-45c1-bc95-69051e00f9b4 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "fa0796d0-14e5-4bcc-9571-3193f4c1185e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1244.604155] env[68194]: DEBUG oslo_concurrency.lockutils [None req-99b934d3-4269-45c1-bc95-69051e00f9b4 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "fa0796d0-14e5-4bcc-9571-3193f4c1185e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1248.439774] env[68194]: WARNING oslo_vmware.rw_handles [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1248.439774] env[68194]: ERROR oslo_vmware.rw_handles [ 1248.440575] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-c25cfe32-faec-4905-a346-95dd1e2966da 
tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/b08a3924-6753-4d92-b923-a8ac83de395f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1248.442412] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1248.442653] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Copying Virtual Disk [datastore1] vmware_temp/b08a3924-6753-4d92-b923-a8ac83de395f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/b08a3924-6753-4d92-b923-a8ac83de395f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1248.442945] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-e396ae47-e371-42a4-b1db-b4f02d2689fa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.450829] env[68194]: DEBUG oslo_vmware.api [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Waiting for the task: (returnval){ [ 1248.450829] env[68194]: value = "task-3466865" [ 1248.450829] env[68194]: _type = "Task" [ 1248.450829] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.458678] env[68194]: DEBUG oslo_vmware.api [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Task: {'id': task-3466865, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1248.961257] env[68194]: DEBUG oslo_vmware.exceptions [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1248.961502] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1248.962044] env[68194]: ERROR nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1248.962044] env[68194]: Faults: ['InvalidArgument'] [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Traceback (most recent call last): [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] yield resources [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] self.driver.spawn(context, instance, image_meta, [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] self._fetch_image_if_missing(context, vi) [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] image_cache(vi, tmp_image_ds_loc) [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] vm_util.copy_virtual_disk( [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] session._wait_for_task(vmdk_copy_task) [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] return self.wait_for_task(task_ref) [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] return evt.wait() [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] result = hub.switch() [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] return self.greenlet.switch() [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] self.f(*self.args, **self.kw) [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] raise exceptions.translate_fault(task_info.error) [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Faults: ['InvalidArgument'] [ 1248.962044] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] [ 1248.963189] env[68194]: INFO nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Terminating instance [ 1248.963891] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1248.964122] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1248.964732] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 
tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1248.964925] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1248.965160] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cc67c63c-821c-44b1-9431-768584488a48 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.967453] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-850a75f8-7e37-4713-a423-65abf74c3d5b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.974082] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1248.974321] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-1722abfa-af85-46e1-beea-8c1989a79d40 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.976576] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1248.976756] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1248.977714] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ac23eee-e208-4cee-b1f5-aef77d5ec564 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1248.982351] env[68194]: DEBUG oslo_vmware.api [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 1248.982351] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52178226-cd8a-939d-74ed-b15eeb884559" [ 1248.982351] env[68194]: _type = "Task" [ 1248.982351] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1248.989641] env[68194]: DEBUG oslo_vmware.api [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52178226-cd8a-939d-74ed-b15eeb884559, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.049352] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1249.049569] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1249.049749] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Deleting the datastore file [datastore1] cef5dc8e-1a5c-4248-9bac-ff25880588ed {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1249.050026] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-64f0ae25-6e72-45d3-8aca-3ed8a6a22723 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.056737] env[68194]: DEBUG oslo_vmware.api [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Waiting for the task: (returnval){ [ 1249.056737] env[68194]: value = "task-3466867" [ 1249.056737] env[68194]: _type = "Task" [ 1249.056737] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1249.064972] env[68194]: DEBUG oslo_vmware.api [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Task: {'id': task-3466867, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1249.492966] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1249.493250] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating directory with path [datastore1] vmware_temp/7114528b-27fc-46fd-9900-a3d3240a8f0d/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1249.493478] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5c4d8031-b5a4-4413-ab11-cc808ddf88b8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.505815] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Created directory with path [datastore1] vmware_temp/7114528b-27fc-46fd-9900-a3d3240a8f0d/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1249.506016] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Fetch image to [datastore1] vmware_temp/7114528b-27fc-46fd-9900-a3d3240a8f0d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1249.506200] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/7114528b-27fc-46fd-9900-a3d3240a8f0d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1249.506921] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-674c7143-2ca0-4e85-9c96-b523d55c7e74 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.513270] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ce0bb2-b9c9-49c2-ab2a-d87e14bd1526 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.522035] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d0165e8e-e44d-470e-b7d2-bf16bfc58e8b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.552069] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-9841427f-cd47-4b2d-9822-7730ec7d8b98 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.557348] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-01453d7a-a3cd-4dff-a1f5-f1f1746bae4a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1249.566992] env[68194]: DEBUG oslo_vmware.api [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Task: {'id': task-3466867, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070718} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1249.567218] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1249.567395] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1249.567565] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1249.567764] env[68194]: INFO nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Took 0.60 seconds to destroy the instance on the hypervisor. 
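Both vCenter tasks in this stretch pass through the same wait loop that the earlier traceback exposes: wait_for_task polls the task info on an interval, raises a translated fault when the task errors (CopyVirtualDisk_Task, task-3466865, fails with InvalidArgument on fileType), and returns normally when it succeeds (DeleteDatastoreFile_Task, task-3466867, completes in about 0.07s). Below is a minimal, self-contained sketch of that poll-translate-raise pattern; the task dictionaries and the FaultException class are illustrative stand-ins, not oslo.vmware's actual API.

import time

class FaultException(Exception):
    """Stand-in for a fault translated from the task's error info."""

def wait_for_task(poll_task_info, interval=0.5):
    """Poll a task until it finishes; return its result or raise its fault."""
    while True:
        info = poll_task_info()
        if info["state"] == "success":
            return info.get("result")
        if info["state"] == "error":
            # Mirrors "raise exceptions.translate_fault(task_info.error)" in the traceback above.
            raise FaultException(info["error"])
        print(f"Task {info['id']} progress is {info.get('progress', 0)}%")
        time.sleep(interval)

# A task that reports progress once and then errors, like task-3466865 above.
_polls = iter([
    {"id": "task-3466865", "state": "running", "progress": 0},
    {"id": "task-3466865", "state": "error",
     "error": "A specified parameter was not correct: fileType"},
])
try:
    wait_for_task(lambda: next(_polls), interval=0)
except FaultException as exc:
    print("spawn failed:", exc)
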
[ 1249.569880] env[68194]: DEBUG nova.compute.claims [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1249.570063] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1249.570288] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1249.579322] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1249.726097] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7114528b-27fc-46fd-9900-a3d3240a8f0d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1249.786053] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1249.786240] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7114528b-27fc-46fd-9900-a3d3240a8f0d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1249.993466] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cdfc5a01-7a53-4861-8eea-fd273f9ba990 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.002709] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f93614a-579b-4ee8-8590-a92fc2c62213 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.033239] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d4ae7a1-0fc6-4444-ae44-46a4a260dad9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.040688] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5a9efaac-61d0-4e86-8a4d-7298d0211cc6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.054942] env[68194]: DEBUG nova.compute.provider_tree [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1250.068232] env[68194]: DEBUG nova.scheduler.client.report [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1250.084166] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.511s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1250.084166] env[68194]: ERROR nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1250.084166] env[68194]: Faults: ['InvalidArgument'] [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Traceback (most recent call last): [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1250.084166] env[68194]: 
ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] self.driver.spawn(context, instance, image_meta, [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] self._fetch_image_if_missing(context, vi) [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] image_cache(vi, tmp_image_ds_loc) [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] vm_util.copy_virtual_disk( [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] session._wait_for_task(vmdk_copy_task) [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] return self.wait_for_task(task_ref) [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] return evt.wait() [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] result = hub.switch() [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] return self.greenlet.switch() [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] self.f(*self.args, **self.kw) [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] raise exceptions.translate_fault(task_info.error) [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Faults: ['InvalidArgument'] [ 1250.084166] env[68194]: ERROR nova.compute.manager [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] [ 1250.084166] env[68194]: DEBUG nova.compute.utils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1250.086408] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Build of instance cef5dc8e-1a5c-4248-9bac-ff25880588ed was re-scheduled: A specified parameter was not correct: fileType [ 1250.086408] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1250.086853] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1250.087044] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1250.087218] env[68194]: DEBUG nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1250.087428] env[68194]: DEBUG nova.network.neutron [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1250.675137] env[68194]: DEBUG nova.network.neutron [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.688961] env[68194]: INFO nova.compute.manager [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Took 0.60 seconds to deallocate network for instance. [ 1250.807051] env[68194]: INFO nova.scheduler.client.report [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Deleted allocations for instance cef5dc8e-1a5c-4248-9bac-ff25880588ed [ 1250.841048] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c25cfe32-faec-4905-a346-95dd1e2966da tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 600.308s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1250.842274] env[68194]: DEBUG oslo_concurrency.lockutils [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 402.115s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1250.842486] env[68194]: DEBUG oslo_concurrency.lockutils [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Acquiring lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1250.842691] env[68194]: DEBUG oslo_concurrency.lockutils [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: 
waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1250.842855] env[68194]: DEBUG oslo_concurrency.lockutils [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1250.847935] env[68194]: INFO nova.compute.manager [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Terminating instance [ 1250.851601] env[68194]: DEBUG nova.compute.manager [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1250.851838] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1250.852340] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-063a9f1a-ae0c-4e14-94d3-2aa9d6dbe748 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.855747] env[68194]: DEBUG nova.compute.manager [None req-fc0cbb24-0a7d-4392-b4ed-d6dbdb96d654 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: f141f989-f0c8-4943-982d-adf499342ec3] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1250.869016] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd1c159d-82f6-4cf4-94f3-16ef5439a400 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1250.899785] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance cef5dc8e-1a5c-4248-9bac-ff25880588ed could not be found. [ 1250.900043] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1250.900213] env[68194]: INFO nova.compute.manager [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Took 0.05 seconds to destroy the instance on the hypervisor. 
[ 1250.900462] env[68194]: DEBUG oslo.service.loopingcall [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1250.900694] env[68194]: DEBUG nova.compute.manager [-] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1250.900795] env[68194]: DEBUG nova.network.neutron [-] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1250.913141] env[68194]: DEBUG nova.compute.manager [None req-fc0cbb24-0a7d-4392-b4ed-d6dbdb96d654 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: f141f989-f0c8-4943-982d-adf499342ec3] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1250.928356] env[68194]: DEBUG nova.network.neutron [-] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1250.935488] env[68194]: DEBUG oslo_concurrency.lockutils [None req-fc0cbb24-0a7d-4392-b4ed-d6dbdb96d654 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "f141f989-f0c8-4943-982d-adf499342ec3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.700s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1250.939333] env[68194]: INFO nova.compute.manager [-] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] Took 0.04 seconds to deallocate network for instance. [ 1250.946519] env[68194]: DEBUG nova.compute.manager [None req-17c84160-d67c-46a1-a481-3a64e5662361 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: 811aeb6b-eb94-4618-a0da-f391184cbd70] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1250.974127] env[68194]: DEBUG nova.compute.manager [None req-17c84160-d67c-46a1-a481-3a64e5662361 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: 811aeb6b-eb94-4618-a0da-f391184cbd70] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1251.006172] env[68194]: DEBUG oslo_concurrency.lockutils [None req-17c84160-d67c-46a1-a481-3a64e5662361 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "811aeb6b-eb94-4618-a0da-f391184cbd70" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 227.198s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.017370] env[68194]: DEBUG nova.compute.manager [None req-84c32697-447a-474a-bf61-24d4205a6db5 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: 554a4ee3-092b-443a-99fc-63d9d753c8ec] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1251.047176] env[68194]: DEBUG nova.compute.manager [None req-84c32697-447a-474a-bf61-24d4205a6db5 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] [instance: 554a4ee3-092b-443a-99fc-63d9d753c8ec] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1251.058497] env[68194]: DEBUG oslo_concurrency.lockutils [None req-43fb2578-323c-401f-92f7-2293c96b9ae1 tempest-ServerActionsTestOtherA-495690470 tempest-ServerActionsTestOtherA-495690470-project-member] Lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.216s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.059661] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 39.595s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1251.059865] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: cef5dc8e-1a5c-4248-9bac-ff25880588ed] During sync_power_state the instance has a pending task (deleting). Skip. [ 1251.060155] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "cef5dc8e-1a5c-4248-9bac-ff25880588ed" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.072953] env[68194]: DEBUG oslo_concurrency.lockutils [None req-84c32697-447a-474a-bf61-24d4205a6db5 tempest-MigrationsAdminTest-1330538300 tempest-MigrationsAdminTest-1330538300-project-member] Lock "554a4ee3-092b-443a-99fc-63d9d753c8ec" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 225.531s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.083779] env[68194]: DEBUG nova.compute.manager [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] [instance: b5596341-1994-4187-a240-7e02d1534ea0] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1251.108105] env[68194]: DEBUG nova.compute.manager [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] [instance: b5596341-1994-4187-a240-7e02d1534ea0] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1251.129965] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] Lock "b5596341-1994-4187-a240-7e02d1534ea0" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.610s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.139585] env[68194]: DEBUG nova.compute.manager [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] [instance: 60be602e-d9e3-4f0f-972e-e19acbb3813e] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1251.164152] env[68194]: DEBUG nova.compute.manager [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] [instance: 60be602e-d9e3-4f0f-972e-e19acbb3813e] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1251.189017] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] Lock "60be602e-d9e3-4f0f-972e-e19acbb3813e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.643s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.201809] env[68194]: DEBUG nova.compute.manager [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] [instance: ef28d606-6b14-4fd1-9768-4dfc90b06fd5] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1251.229917] env[68194]: DEBUG nova.compute.manager [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] [instance: ef28d606-6b14-4fd1-9768-4dfc90b06fd5] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1251.252361] env[68194]: DEBUG oslo_concurrency.lockutils [None req-6e7929d2-5a4c-40e4-a4b2-ea770516f1be tempest-ListServersNegativeTestJSON-1391014317 tempest-ListServersNegativeTestJSON-1391014317-project-member] Lock "ef28d606-6b14-4fd1-9768-4dfc90b06fd5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 224.679s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.262194] env[68194]: DEBUG nova.compute.manager [None req-3f8e19db-4904-4dd5-829b-b620b1d6ca05 tempest-ServerMetadataNegativeTestJSON-1420612404 tempest-ServerMetadataNegativeTestJSON-1420612404-project-member] [instance: ec03aeaf-11e6-456f-b408-77557f77645b] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1251.285581] env[68194]: DEBUG nova.compute.manager [None req-3f8e19db-4904-4dd5-829b-b620b1d6ca05 tempest-ServerMetadataNegativeTestJSON-1420612404 tempest-ServerMetadataNegativeTestJSON-1420612404-project-member] [instance: ec03aeaf-11e6-456f-b408-77557f77645b] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1251.307246] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3f8e19db-4904-4dd5-829b-b620b1d6ca05 tempest-ServerMetadataNegativeTestJSON-1420612404 tempest-ServerMetadataNegativeTestJSON-1420612404-project-member] Lock "ec03aeaf-11e6-456f-b408-77557f77645b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 213.252s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.316895] env[68194]: DEBUG nova.compute.manager [None req-0292630c-1f92-4ba3-8569-fe1c8bf5d3b5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 1ef2d43f-9e6f-4354-91c5-9e1155c2a382] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1251.342503] env[68194]: DEBUG nova.compute.manager [None req-0292630c-1f92-4ba3-8569-fe1c8bf5d3b5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 1ef2d43f-9e6f-4354-91c5-9e1155c2a382] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1251.364261] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0292630c-1f92-4ba3-8569-fe1c8bf5d3b5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "1ef2d43f-9e6f-4354-91c5-9e1155c2a382" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.448s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.373397] env[68194]: DEBUG nova.compute.manager [None req-f344be3f-461d-4cdc-8d9a-92b7cfd668d0 tempest-ServersNegativeTestMultiTenantJSON-951148991 tempest-ServersNegativeTestMultiTenantJSON-951148991-project-member] [instance: 6ee32532-e88c-4eb6-9e3f-c1ea42a4d560] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1251.399931] env[68194]: DEBUG nova.compute.manager [None req-f344be3f-461d-4cdc-8d9a-92b7cfd668d0 tempest-ServersNegativeTestMultiTenantJSON-951148991 tempest-ServersNegativeTestMultiTenantJSON-951148991-project-member] [instance: 6ee32532-e88c-4eb6-9e3f-c1ea42a4d560] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1251.421787] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f344be3f-461d-4cdc-8d9a-92b7cfd668d0 tempest-ServersNegativeTestMultiTenantJSON-951148991 tempest-ServersNegativeTestMultiTenantJSON-951148991-project-member] Lock "6ee32532-e88c-4eb6-9e3f-c1ea42a4d560" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 198.749s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.433309] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1251.496017] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1251.496017] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1251.496017] env[68194]: INFO nova.compute.claims [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1251.850563] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-702bb524-1ea0-4894-83f9-06082a9b31c9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.859129] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-108b114d-2760-4f66-ab57-4f3596762924 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.888682] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28278593-6f47-4903-8ce4-266918d3e2e6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.896359] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a3f9c9b-b394-4251-a8b4-3945a19ba03a {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1251.909247] env[68194]: DEBUG nova.compute.provider_tree [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1251.919131] env[68194]: DEBUG nova.scheduler.client.report [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1251.934599] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.440s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1251.935109] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1251.978727] env[68194]: DEBUG nova.compute.utils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1251.980434] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1251.980546] env[68194]: DEBUG nova.network.neutron [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1251.993268] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Start building block device mappings for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1252.073771] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1252.083390] env[68194]: DEBUG nova.policy [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '905b97edce374ad5a240d61220f66f80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05181674899f44e7bb6d234643c3e6b6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1252.100398] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1252.100651] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1252.100809] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1252.100995] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1252.101162] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1252.101311] env[68194]: DEBUG nova.virt.hardware [None 
req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1252.101519] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1252.101679] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1252.101846] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1252.102236] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1252.102236] env[68194]: DEBUG nova.virt.hardware [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1252.103022] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47c4cab9-4298-415e-8914-8dc88eeda51a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.114016] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3a1e4f91-9d77-4eda-b0fc-65c94d080ac1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1252.560743] env[68194]: DEBUG nova.network.neutron [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Successfully created port: 67d17d6c-db1c-49dd-9300-a3903b3f85ab {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1253.550704] env[68194]: DEBUG nova.compute.manager [req-d98d3fea-65ee-4b78-a8c5-9824f2681006 req-3cd30256-cda8-4163-942e-e6ae6a15e6c3 service nova] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Received event network-vif-plugged-67d17d6c-db1c-49dd-9300-a3903b3f85ab {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1253.550985] env[68194]: DEBUG oslo_concurrency.lockutils [req-d98d3fea-65ee-4b78-a8c5-9824f2681006 req-3cd30256-cda8-4163-942e-e6ae6a15e6c3 service nova] Acquiring lock 
"b487291e-1b85-4064-9949-3d8895b6dcae-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1253.551264] env[68194]: DEBUG oslo_concurrency.lockutils [req-d98d3fea-65ee-4b78-a8c5-9824f2681006 req-3cd30256-cda8-4163-942e-e6ae6a15e6c3 service nova] Lock "b487291e-1b85-4064-9949-3d8895b6dcae-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1253.551344] env[68194]: DEBUG oslo_concurrency.lockutils [req-d98d3fea-65ee-4b78-a8c5-9824f2681006 req-3cd30256-cda8-4163-942e-e6ae6a15e6c3 service nova] Lock "b487291e-1b85-4064-9949-3d8895b6dcae-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1253.551529] env[68194]: DEBUG nova.compute.manager [req-d98d3fea-65ee-4b78-a8c5-9824f2681006 req-3cd30256-cda8-4163-942e-e6ae6a15e6c3 service nova] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] No waiting events found dispatching network-vif-plugged-67d17d6c-db1c-49dd-9300-a3903b3f85ab {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1253.552300] env[68194]: WARNING nova.compute.manager [req-d98d3fea-65ee-4b78-a8c5-9824f2681006 req-3cd30256-cda8-4163-942e-e6ae6a15e6c3 service nova] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Received unexpected event network-vif-plugged-67d17d6c-db1c-49dd-9300-a3903b3f85ab for instance with vm_state building and task_state spawning. [ 1253.555749] env[68194]: DEBUG nova.network.neutron [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Successfully updated port: 67d17d6c-db1c-49dd-9300-a3903b3f85ab {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1253.566717] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "refresh_cache-b487291e-1b85-4064-9949-3d8895b6dcae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1253.566872] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "refresh_cache-b487291e-1b85-4064-9949-3d8895b6dcae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1253.567036] env[68194]: DEBUG nova.network.neutron [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1253.611509] env[68194]: DEBUG nova.network.neutron [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Instance cache missing 
network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1253.776581] env[68194]: DEBUG nova.network.neutron [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Updating instance_info_cache with network_info: [{"id": "67d17d6c-db1c-49dd-9300-a3903b3f85ab", "address": "fa:16:3e:0c:2e:34", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d17d6c-db", "ovs_interfaceid": "67d17d6c-db1c-49dd-9300-a3903b3f85ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1253.788753] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "refresh_cache-b487291e-1b85-4064-9949-3d8895b6dcae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1253.789091] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Instance network_info: |[{"id": "67d17d6c-db1c-49dd-9300-a3903b3f85ab", "address": "fa:16:3e:0c:2e:34", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d17d6c-db", "ovs_interfaceid": "67d17d6c-db1c-49dd-9300-a3903b3f85ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1253.789490] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:0c:2e:34', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '67d17d6c-db1c-49dd-9300-a3903b3f85ab', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1253.797444] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating folder: Project (05181674899f44e7bb6d234643c3e6b6). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1253.798046] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-94495abc-5524-475c-855b-587573212a6b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.810092] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Created folder: Project (05181674899f44e7bb6d234643c3e6b6) in parent group-v692426. [ 1253.810245] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating folder: Instances. Parent ref: group-v692503. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1253.810455] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-8a0062f3-fde9-4a3c-bd2b-fbb2be002906 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.819015] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Created folder: Instances in parent group-v692503. [ 1253.819255] env[68194]: DEBUG oslo.service.loopingcall [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1253.819438] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1253.819636] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-bfe9eb9a-7626-463a-9cfe-e07e2ca9d015 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1253.838351] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1253.838351] env[68194]: value = "task-3466870" [ 1253.838351] env[68194]: _type = "Task" [ 1253.838351] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1253.846048] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466870, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.349200] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466870, 'name': CreateVM_Task, 'duration_secs': 0.30824} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1254.349443] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1254.358330] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1254.358509] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1254.358869] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1254.359156] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-4ecf4e2a-f563-4671-bc0c-bf9ac6d6c67f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1254.364083] env[68194]: DEBUG oslo_vmware.api [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 1254.364083] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52fb9328-15a3-5470-5c52-6a6a7dbac7f3" [ 1254.364083] env[68194]: _type = "Task" [ 1254.364083] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1254.372873] env[68194]: DEBUG oslo_vmware.api [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52fb9328-15a3-5470-5c52-6a6a7dbac7f3, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1254.875662] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1254.876035] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1254.876218] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1255.831446] env[68194]: DEBUG nova.compute.manager [req-7943d674-1a2a-4969-a3b5-f45b2f677bf1 req-6a73b0d1-a1a1-402a-9a25-d5be149dc83a service nova] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Received event network-changed-67d17d6c-db1c-49dd-9300-a3903b3f85ab {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1255.831446] env[68194]: DEBUG nova.compute.manager [req-7943d674-1a2a-4969-a3b5-f45b2f677bf1 req-6a73b0d1-a1a1-402a-9a25-d5be149dc83a service nova] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Refreshing instance network info cache due to event network-changed-67d17d6c-db1c-49dd-9300-a3903b3f85ab. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1255.831446] env[68194]: DEBUG oslo_concurrency.lockutils [req-7943d674-1a2a-4969-a3b5-f45b2f677bf1 req-6a73b0d1-a1a1-402a-9a25-d5be149dc83a service nova] Acquiring lock "refresh_cache-b487291e-1b85-4064-9949-3d8895b6dcae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1255.831446] env[68194]: DEBUG oslo_concurrency.lockutils [req-7943d674-1a2a-4969-a3b5-f45b2f677bf1 req-6a73b0d1-a1a1-402a-9a25-d5be149dc83a service nova] Acquired lock "refresh_cache-b487291e-1b85-4064-9949-3d8895b6dcae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1255.831446] env[68194]: DEBUG nova.network.neutron [req-7943d674-1a2a-4969-a3b5-f45b2f677bf1 req-6a73b0d1-a1a1-402a-9a25-d5be149dc83a service nova] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Refreshing network info cache for port 67d17d6c-db1c-49dd-9300-a3903b3f85ab {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1256.079135] env[68194]: DEBUG nova.network.neutron [req-7943d674-1a2a-4969-a3b5-f45b2f677bf1 req-6a73b0d1-a1a1-402a-9a25-d5be149dc83a service nova] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Updated VIF entry in instance network info cache for port 67d17d6c-db1c-49dd-9300-a3903b3f85ab. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1256.079571] env[68194]: DEBUG nova.network.neutron [req-7943d674-1a2a-4969-a3b5-f45b2f677bf1 req-6a73b0d1-a1a1-402a-9a25-d5be149dc83a service nova] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Updating instance_info_cache with network_info: [{"id": "67d17d6c-db1c-49dd-9300-a3903b3f85ab", "address": "fa:16:3e:0c:2e:34", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap67d17d6c-db", "ovs_interfaceid": "67d17d6c-db1c-49dd-9300-a3903b3f85ab", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1256.088775] env[68194]: DEBUG oslo_concurrency.lockutils [req-7943d674-1a2a-4969-a3b5-f45b2f677bf1 req-6a73b0d1-a1a1-402a-9a25-d5be149dc83a service nova] Releasing lock "refresh_cache-b487291e-1b85-4064-9949-3d8895b6dcae" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1259.269277] env[68194]: DEBUG oslo_concurrency.lockutils [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "b487291e-1b85-4064-9949-3d8895b6dcae" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1268.450964] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquiring lock "3da3b410-889a-42c5-9603-f92f689ab5b5" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1268.451307] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1274.558112] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c357d513-6539-4b96-869c-0eccd3af3b4d tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] 
Acquiring lock "874d08ae-ce38-4a35-bd3f-5c40a2c9bf97" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1274.558459] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c357d513-6539-4b96-869c-0eccd3af3b4d tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] Lock "874d08ae-ce38-4a35-bd3f-5c40a2c9bf97" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1287.668629] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8c1ed091-5e97-43b1-8522-3899ee592014 tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] Acquiring lock "fa78516a-fe6f-4770-9def-ebe439e87adc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1287.668629] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8c1ed091-5e97-43b1-8522-3899ee592014 tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] Lock "fa78516a-fe6f-4770-9def-ebe439e87adc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1292.739746] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b0e1c300-81b3-4f34-b51c-2d199c8152f0 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Acquiring lock "92ae0029-0d42-4655-9971-6dfbc07df15d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1292.740031] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b0e1c300-81b3-4f34-b51c-2d199c8152f0 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Lock "92ae0029-0d42-4655-9971-6dfbc07df15d" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1294.416562] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1294.416957] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1297.416812] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1297.844973] env[68194]: WARNING oslo_vmware.rw_handles [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1297.844973] env[68194]: ERROR oslo_vmware.rw_handles [ 1297.845565] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/7114528b-27fc-46fd-9900-a3d3240a8f0d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1297.848463] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1297.848843] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Copying Virtual Disk [datastore1] vmware_temp/7114528b-27fc-46fd-9900-a3d3240a8f0d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/7114528b-27fc-46fd-9900-a3d3240a8f0d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1297.849282] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-2801b182-5163-47ce-a6a5-63324e7c8184 
{{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1297.859122] env[68194]: DEBUG oslo_vmware.api [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 1297.859122] env[68194]: value = "task-3466871" [ 1297.859122] env[68194]: _type = "Task" [ 1297.859122] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1297.868982] env[68194]: DEBUG oslo_vmware.api [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': task-3466871, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.369900] env[68194]: DEBUG oslo_vmware.exceptions [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1298.370203] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1298.370768] env[68194]: ERROR nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1298.370768] env[68194]: Faults: ['InvalidArgument'] [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Traceback (most recent call last): [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] yield resources [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] self.driver.spawn(context, instance, image_meta, [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 
2243c245-bbb3-43b7-89a9-fb727d452885] self._fetch_image_if_missing(context, vi) [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] image_cache(vi, tmp_image_ds_loc) [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] vm_util.copy_virtual_disk( [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] session._wait_for_task(vmdk_copy_task) [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] return self.wait_for_task(task_ref) [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] return evt.wait() [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] result = hub.switch() [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] return self.greenlet.switch() [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] self.f(*self.args, **self.kw) [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] raise exceptions.translate_fault(task_info.error) [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Faults: ['InvalidArgument'] [ 1298.370768] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] [ 1298.371926] env[68194]: INFO nova.compute.manager 
[None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Terminating instance [ 1298.372608] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1298.372817] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1298.373076] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-7b85ce6b-ac20-4927-9b9c-787d58d0eef8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.375707] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1298.375910] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1298.376647] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57882dc3-7cf4-4a41-80b6-34ddede86a78 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.383340] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1298.383554] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-99546b38-6e05-47b2-9522-c144ac931c83 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.385719] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1298.385902] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Folder [datastore1] devstack-image-cache_base 
created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1298.386840] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5942a46f-631e-47f1-b787-8a99f06212cb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.391540] env[68194]: DEBUG oslo_vmware.api [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Waiting for the task: (returnval){ [ 1298.391540] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]529b9c55-5f0b-79aa-519c-a79269280169" [ 1298.391540] env[68194]: _type = "Task" [ 1298.391540] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1298.398626] env[68194]: DEBUG oslo_vmware.api [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]529b9c55-5f0b-79aa-519c-a79269280169, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1298.901989] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1298.902388] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Creating directory with path [datastore1] vmware_temp/a9b31490-948d-468f-931f-e35f8ef35f67/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1298.902571] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-b2e33cd7-3ed7-40c2-b74f-e33d89bcd6bb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.924350] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Created directory with path [datastore1] vmware_temp/a9b31490-948d-468f-931f-e35f8ef35f67/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1298.924564] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Fetch image to [datastore1] vmware_temp/a9b31490-948d-468f-931f-e35f8ef35f67/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1298.924737] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] 
[instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/a9b31490-948d-468f-931f-e35f8ef35f67/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1298.925616] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca431951-4faa-43be-870c-cfc905b6b0b8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.932590] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af232d5c-f934-4cdd-8e72-491d26b5d9e7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.942046] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9decc3c-fdf6-46c7-a560-3bd6edc164d7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.973340] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11709ebe-d68f-4477-8d3f-b3a88adf442d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.978782] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-b7f5db3e-c73d-4f59-9eb2-3ac4f75df2e8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1298.999067] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1299.014518] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1299.014734] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1299.014914] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Deleting the datastore file [datastore1] 2243c245-bbb3-43b7-89a9-fb727d452885 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1299.015193] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-70f7a1fd-2553-4839-90b4-733afc3233ed {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} 
[ 1299.021115] env[68194]: DEBUG oslo_vmware.api [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 1299.021115] env[68194]: value = "task-3466873" [ 1299.021115] env[68194]: _type = "Task" [ 1299.021115] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.028601] env[68194]: DEBUG oslo_vmware.api [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': task-3466873, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.191711] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1299.193587] env[68194]: ERROR nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] result = getattr(controller, method)(*args, **kwargs) [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._get(image_id) [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] resp, body = self.http_client.get(url, headers=header) [ 1299.193587] 
env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.request(url, 'GET', **kwargs) [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._handle_response(resp) [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise exc.from_response(resp, resp.content) [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] During handling of the above exception, another exception occurred: [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] yield resources [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self.driver.spawn(context, instance, image_meta, [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._fetch_image_if_missing(context, vi) [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 
108001a3-ff36-475b-a7a5-8e0e197c62a8] image_fetch(context, vi, tmp_image_ds_loc) [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] images.fetch_image( [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1299.193587] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] metadata = IMAGE_API.get(context, image_ref) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return session.show(context, image_id, [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] _reraise_translated_image_exception(image_id) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise new_exc.with_traceback(exc_trace) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] result = getattr(controller, method)(*args, **kwargs) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._get(image_id) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] resp, body = self.http_client.get(url, headers=header) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.request(url, 'GET', **kwargs) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._handle_response(resp) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise exc.from_response(resp, resp.content) [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] nova.exception.ImageNotAuthorized: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. [ 1299.195077] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1299.195077] env[68194]: INFO nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Terminating instance [ 1299.195853] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1299.195853] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1299.195923] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-381f805a-c568-4a47-9fe5-63159922cccc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.198904] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1299.199042] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1299.199855] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d391b755-2cd7-40f0-be82-4d4025669ea8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.207501] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1299.207730] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a36c5021-aebe-44a0-b653-0fe122efe556 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.209957] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1299.210145] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1299.211117] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-03f3a8ba-7960-48d3-8a1b-e0c282e44460 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.215651] env[68194]: DEBUG oslo_vmware.api [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Waiting for the task: (returnval){ [ 1299.215651] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5256d8d4-ee57-2396-cc33-f3f09a385966" [ 1299.215651] env[68194]: _type = "Task" [ 1299.215651] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.227519] env[68194]: DEBUG oslo_vmware.api [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5256d8d4-ee57-2396-cc33-f3f09a385966, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.265073] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1299.265073] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1299.265285] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Deleting the datastore file [datastore1] 108001a3-ff36-475b-a7a5-8e0e197c62a8 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1299.265387] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-cfe44430-8236-4610-8c50-8c4bee925a09 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.272401] env[68194]: DEBUG oslo_vmware.api [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Waiting for the task: (returnval){ [ 1299.272401] env[68194]: value = "task-3466875" [ 1299.272401] env[68194]: _type = "Task" [ 1299.272401] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1299.279907] env[68194]: DEBUG oslo_vmware.api [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Task: {'id': task-3466875, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1299.415674] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1299.415922] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1299.416098] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1299.530873] env[68194]: DEBUG oslo_vmware.api [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': task-3466873, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.115804} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.531149] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1299.531338] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1299.531508] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1299.531681] env[68194]: INFO nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Took 1.16 seconds to destroy the instance on the hypervisor. 
[ 1299.533732] env[68194]: DEBUG nova.compute.claims [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1299.533929] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1299.534162] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1299.725515] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1299.725831] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Creating directory with path [datastore1] vmware_temp/51e5411b-975c-4580-b8e9-7d502fb52d60/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1299.726082] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-9ce83a63-095d-42f8-9d04-2f5c1d221f9e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.737843] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Created directory with path [datastore1] vmware_temp/51e5411b-975c-4580-b8e9-7d502fb52d60/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1299.738140] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Fetch image to [datastore1] vmware_temp/51e5411b-975c-4580-b8e9-7d502fb52d60/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1299.738394] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/51e5411b-975c-4580-b8e9-7d502fb52d60/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 
{{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1299.739478] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-17ec081a-512b-4de9-bdb5-233a0bb542ad {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.750962] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaf395f9-0c98-4069-8857-957e9481056d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.766646] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d74275f9-59b8-4516-bb89-7533753aef16 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.806782] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41b3a934-ae30-4284-aa6d-c1edacb37d43 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.812166] env[68194]: DEBUG oslo_vmware.api [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Task: {'id': task-3466875, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071222} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1299.812750] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1299.812965] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1299.813184] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1299.813370] env[68194]: INFO nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Took 0.61 seconds to destroy the instance on the hypervisor. 
[ 1299.816688] env[68194]: DEBUG nova.compute.claims [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1299.816860] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1299.817084] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-75222d0c-c468-4265-8c4c-12c9e9f86159 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.842073] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-27cf61fa-977d-4a66-b6b4-c801c3c4ec6e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.849097] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8443e474-ae65-4899-88d3-fa1df975e748 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.853559] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1299.882649] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf4da1e-5425-4658-b030-b8d02d56a4de {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.889846] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5c2b53f-b3e1-4924-ae7d-ff2039635f0a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1299.905917] env[68194]: DEBUG nova.compute.provider_tree [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1299.914432] env[68194]: DEBUG nova.scheduler.client.report [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1299.922122] env[68194]: DEBUG oslo_vmware.rw_handles [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/51e5411b-975c-4580-b8e9-7d502fb52d60/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1299.981638] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.447s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1299.982342] env[68194]: ERROR nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1299.982342] env[68194]: Faults: ['InvalidArgument'] [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Traceback (most recent call last): [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] self.driver.spawn(context, instance, image_meta, [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] self._fetch_image_if_missing(context, vi) [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] image_cache(vi, tmp_image_ds_loc) [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] vm_util.copy_virtual_disk( [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] session._wait_for_task(vmdk_copy_task) [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] return self.wait_for_task(task_ref) [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] return evt.wait() [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] result = hub.switch() [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] return self.greenlet.switch() [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] self.f(*self.args, **self.kw) [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] raise exceptions.translate_fault(task_info.error) [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Faults: ['InvalidArgument'] [ 1299.982342] env[68194]: ERROR nova.compute.manager [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] [ 1299.983426] env[68194]: DEBUG nova.compute.utils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1299.986135] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Build of instance 2243c245-bbb3-43b7-89a9-fb727d452885 was re-scheduled: A specified parameter was not correct: fileType [ 1299.986135] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1299.986565] 
env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1299.986775] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1299.986978] env[68194]: DEBUG nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1299.987192] env[68194]: DEBUG nova.network.neutron [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1299.988996] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.172s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1299.992399] env[68194]: DEBUG oslo_vmware.rw_handles [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1299.992575] env[68194]: DEBUG oslo_vmware.rw_handles [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/51e5411b-975c-4580-b8e9-7d502fb52d60/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1300.278213] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c45327a6-f6a6-44fe-b475-1b5762651823 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.285632] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-30a92173-4e9d-422b-b8b4-9523ccf858cc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.321161] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbc63f09-d92e-4bb3-8a0e-921b131528a7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.328873] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ce00e74-9050-40f1-830e-a972828d7975 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.333568] env[68194]: DEBUG nova.network.neutron [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.344415] env[68194]: DEBUG nova.compute.provider_tree [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1300.346058] env[68194]: INFO nova.compute.manager [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Took 0.36 seconds to deallocate network for instance. 
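The inventory reported above for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 is what placement uses to size this host: usable capacity per resource class is (total - reserved) * allocation_ratio. A minimal, self-contained sketch of that arithmetic applied to the logged figures (not Nova code, just the formula with the values from the report):

    # Sketch only: placement-style capacity from the inventory figures logged
    # for provider 717076d7-0911-435a-89c8-6f0e41bd02c5.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(record):
        # Placement treats usable capacity as (total - reserved) * allocation_ratio.
        return int((record['total'] - record['reserved']) * record['allocation_ratio'])

    for rc, record in inventory.items():
        print(rc, capacity(record))
    # Expected output: VCPU 192, MEMORY_MB 196078, DISK_GB 400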
[ 1300.355728] env[68194]: DEBUG nova.scheduler.client.report [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1300.369048] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.380s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.369983] env[68194]: ERROR nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] result = getattr(controller, method)(*args, **kwargs) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._get(image_id) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] resp, body = self.http_client.get(url, headers=header) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.request(url, 'GET', **kwargs) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._handle_response(resp) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise exc.from_response(resp, resp.content) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] During handling of the above exception, another exception occurred: [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self.driver.spawn(context, instance, image_meta, [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._fetch_image_if_missing(context, vi) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] image_fetch(context, vi, tmp_image_ds_loc) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] images.fetch_image( [ 1300.369983] env[68194]: 
ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] metadata = IMAGE_API.get(context, image_ref) [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1300.369983] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return session.show(context, image_id, [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] _reraise_translated_image_exception(image_id) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise new_exc.with_traceback(exc_trace) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] result = getattr(controller, method)(*args, **kwargs) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._get(image_id) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] resp, body = self.http_client.get(url, headers=header) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.request(url, 'GET', **kwargs) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1300.371120] 
env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._handle_response(resp) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise exc.from_response(resp, resp.content) [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] nova.exception.ImageNotAuthorized: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. [ 1300.371120] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.371120] env[68194]: DEBUG nova.compute.utils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1300.372523] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Build of instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 was re-scheduled: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1300.372652] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1300.373501] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1300.373501] env[68194]: DEBUG nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1300.373501] env[68194]: DEBUG nova.network.neutron [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1300.411840] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.431921] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1300.442374] env[68194]: INFO nova.scheduler.client.report [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Deleted allocations for instance 2243c245-bbb3-43b7-89a9-fb727d452885 [ 1300.450573] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1300.450817] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1300.450986] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.451152] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1300.453756] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c2f2654a-8158-4acd-bc83-2c960be0024d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.462725] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-976e9df4-5fc5-4803-9726-8b175ee0d35d {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.468978] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8fe1c72e-5522-4584-90d3-6094d0bbdef5 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2243c245-bbb3-43b7-89a9-fb727d452885" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 649.265s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.471069] env[68194]: DEBUG oslo_concurrency.lockutils [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2243c245-bbb3-43b7-89a9-fb727d452885" acquired by "nova.compute.manager.ComputeManager.terminate_instance.<locals>.do_terminate_instance" :: waited 450.260s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1300.471279] env[68194]: DEBUG oslo_concurrency.lockutils [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "2243c245-bbb3-43b7-89a9-fb727d452885-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1300.471524] env[68194]: DEBUG oslo_concurrency.lockutils [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2243c245-bbb3-43b7-89a9-fb727d452885-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1300.471643] env[68194]: DEBUG oslo_concurrency.lockutils [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2243c245-bbb3-43b7-89a9-fb727d452885-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.<locals>._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.485202] env[68194]: INFO nova.compute.manager [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Terminating instance [ 1300.485962] env[68194]: DEBUG nova.compute.manager [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1300.486326] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1300.486977] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6e651050-8067-430b-a282-2c37fe96b0d8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.490529] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-29f7dde2-823c-4ea7-8c4d-2ee8d825a24b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.492779] env[68194]: DEBUG nova.compute.manager [None req-81e9ff32-fa68-4d50-a7a5-fff089a2a377 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] [instance: 939645a5-ef9a-4951-ada2-6bd95cec173f] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1300.501687] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-796fd80e-7c92-40d4-ad47-a9c2bd8d71f4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.507936] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9a543c9-dfe9-4363-877d-48422f295f80 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1300.520791] env[68194]: DEBUG nova.compute.manager [None req-81e9ff32-fa68-4d50-a7a5-fff089a2a377 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] [instance: 939645a5-ef9a-4951-ada2-6bd95cec173f] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1300.546623] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180946MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1300.546793] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1300.546994] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1300.562362] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2243c245-bbb3-43b7-89a9-fb727d452885 could not be found. [ 1300.562362] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1300.562362] env[68194]: INFO nova.compute.manager [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Took 0.08 seconds to destroy the instance on the hypervisor. [ 1300.562362] env[68194]: DEBUG oslo.service.loopingcall [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.<locals>._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1300.562528] env[68194]: DEBUG neutronclient.v2_0.client [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1300.564561] env[68194]: ERROR nova.compute.manager [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. 
[ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] result = getattr(controller, method)(*args, **kwargs) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._get(image_id) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] resp, body = self.http_client.get(url, headers=header) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.request(url, 'GET', **kwargs) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._handle_response(resp) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise exc.from_response(resp, resp.content) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] During handling of the above exception, another exception occurred: [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self.driver.spawn(context, instance, image_meta, [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._fetch_image_if_missing(context, vi) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] image_fetch(context, vi, tmp_image_ds_loc) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] images.fetch_image( [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] metadata = IMAGE_API.get(context, image_ref) [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1300.564561] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return session.show(context, image_id, [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] _reraise_translated_image_exception(image_id) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise new_exc.with_traceback(exc_trace) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 
108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] result = getattr(controller, method)(*args, **kwargs) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._get(image_id) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] resp, body = self.http_client.get(url, headers=header) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.request(url, 'GET', **kwargs) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self._handle_response(resp) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise exc.from_response(resp, resp.content) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] nova.exception.ImageNotAuthorized: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. 
[ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] During handling of the above exception, another exception occurred: [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._build_and_run_instance(context, instance, image, [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise exception.RescheduledException( [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] nova.exception.RescheduledException: Build of instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 was re-scheduled: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] During handling of the above exception, another exception occurred: [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] exception_handler_v20(status_code, error_body) [ 1300.565675] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise client_exc(message=error_message, [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Neutron server returns request_ids: ['req-cada14e1-b62c-49b0-9a40-a530a1917f9d'] [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 
108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] During handling of the above exception, another exception occurred: [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._deallocate_network(context, instance, requested_networks) [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self.network_api.deallocate_for_instance( [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] data = neutron.list_ports(**search_opts) [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.list('ports', self.ports_path, retrieve_all, [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] for r in self._pagination(collection, path, **params): [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] res = self.get(path, params=params) [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 
108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.retry_request("GET", action, body=body, [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.do_request(method, action, body=body, [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._handle_fault_response(status_code, replybody, resp) [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise exception.Unauthorized() [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] nova.exception.Unauthorized: Not authorized. [ 1300.566809] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1300.570129] env[68194]: DEBUG nova.compute.manager [-] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1300.570129] env[68194]: DEBUG nova.network.neutron [-] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1300.570564] env[68194]: DEBUG oslo_concurrency.lockutils [None req-81e9ff32-fa68-4d50-a7a5-fff089a2a377 tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] Lock "939645a5-ef9a-4951-ada2-6bd95cec173f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.<locals>._locked_do_build_and_run_instance" :: held 224.402s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.590618] env[68194]: DEBUG nova.compute.manager [None req-2ca70a7a-7eb7-4054-a72e-286e2f119956 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] [instance: 9a4d19c2-79b2-4323-a68a-6ba2c82e4d13] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1300.607402] env[68194]: DEBUG nova.network.neutron [-] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.615578] env[68194]: INFO nova.compute.manager [-] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] Took 0.05 seconds to deallocate network for instance. [ 1300.623583] env[68194]: DEBUG nova.compute.manager [None req-2ca70a7a-7eb7-4054-a72e-286e2f119956 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] [instance: 9a4d19c2-79b2-4323-a68a-6ba2c82e4d13] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1300.631027] env[68194]: INFO nova.scheduler.client.report [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Deleted allocations for instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 [ 1300.637286] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.637445] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1300.637586] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1300.637709] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1300.637830] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1300.637950] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1300.638101] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1300.638220] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1300.638341] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1300.656825] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2ca70a7a-7eb7-4054-a72e-286e2f119956 tempest-AttachVolumeShelveTestJSON-734519872 tempest-AttachVolumeShelveTestJSON-734519872-project-member] Lock "9a4d19c2-79b2-4323-a68a-6ba2c82e4d13" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.972s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.659294] env[68194]: DEBUG oslo_concurrency.lockutils [None req-012c90f2-f014-47a4-ab20-c8739653db04 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 638.439s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.659856] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.662300] env[68194]: DEBUG oslo_concurrency.lockutils [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 441.041s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1300.662558] env[68194]: DEBUG oslo_concurrency.lockutils [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquiring lock "108001a3-ff36-475b-a7a5-8e0e197c62a8-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1300.662735] env[68194]: DEBUG oslo_concurrency.lockutils [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1300.662902] env[68194]: DEBUG oslo_concurrency.lockutils [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.664559] env[68194]: INFO nova.compute.manager [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Terminating instance [ 1300.671797] env[68194]: DEBUG oslo_concurrency.lockutils [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquiring lock "refresh_cache-108001a3-ff36-475b-a7a5-8e0e197c62a8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1300.671970] env[68194]: DEBUG oslo_concurrency.lockutils [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Acquired lock "refresh_cache-108001a3-ff36-475b-a7a5-8e0e197c62a8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1300.672386] env[68194]: DEBUG nova.network.neutron [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1300.673263] env[68194]: DEBUG nova.compute.manager [None 
req-225f0657-6967-49b5-ad9a-85a982f49140 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] [instance: de2cd600-0b9a-45f2-a0e7-b78dfd52d0f7] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1300.677092] env[68194]: DEBUG nova.compute.manager [None req-5a739cbb-877a-4bf5-876f-760b16467ebd tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] [instance: 4982c984-c85f-4c23-b643-9ad8a7a4f405] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1300.683796] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.694033] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.705088] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.742652] env[68194]: DEBUG nova.compute.manager [None req-225f0657-6967-49b5-ad9a-85a982f49140 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] [instance: de2cd600-0b9a-45f2-a0e7-b78dfd52d0f7] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1300.747880] env[68194]: DEBUG nova.compute.manager [None req-5a739cbb-877a-4bf5-876f-760b16467ebd tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] [instance: 4982c984-c85f-4c23-b643-9ad8a7a4f405] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1300.749827] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ada24904-c85b-4af9-be4c-afc8514b7307 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.765880] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fa0796d0-14e5-4bcc-9571-3193f4c1185e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.768928] env[68194]: DEBUG oslo_concurrency.lockutils [None req-865d3978-83b5-4ab5-952e-f6c6178079d2 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2243c245-bbb3-43b7-89a9-fb727d452885" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.298s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.771391] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "2243c245-bbb3-43b7-89a9-fb727d452885" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 89.305s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1300.771391] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2243c245-bbb3-43b7-89a9-fb727d452885] During sync_power_state the instance has a pending task (deleting). Skip. [ 1300.771391] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "2243c245-bbb3-43b7-89a9-fb727d452885" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.777201] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.780816] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5a739cbb-877a-4bf5-876f-760b16467ebd tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] Lock "4982c984-c85f-4c23-b643-9ad8a7a4f405" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 211.682s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.784770] env[68194]: DEBUG oslo_concurrency.lockutils [None req-225f0657-6967-49b5-ad9a-85a982f49140 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] Lock "de2cd600-0b9a-45f2-a0e7-b78dfd52d0f7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 212.274s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.787440] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 874d08ae-ce38-4a35-bd3f-5c40a2c9bf97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.791989] env[68194]: DEBUG nova.compute.manager [None req-da20857b-6d94-4d17-9402-da6a623e2957 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] [instance: 3f7dc638-fa6b-40b0-90dc-b4ddf0b2b99b] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1300.797441] env[68194]: DEBUG nova.compute.manager [None req-b68fc678-ad22-4cf7-a1cf-02ce687f8436 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] [instance: 4635c03b-1415-4ad9-8825-8997c68ad9e7] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1300.800348] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fa78516a-fe6f-4770-9def-ebe439e87adc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.814607] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 92ae0029-0d42-4655-9971-6dfbc07df15d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1300.814840] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1300.814982] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1300.824608] env[68194]: DEBUG nova.compute.manager [None req-da20857b-6d94-4d17-9402-da6a623e2957 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] [instance: 3f7dc638-fa6b-40b0-90dc-b4ddf0b2b99b] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1300.826589] env[68194]: DEBUG nova.compute.manager [None req-b68fc678-ad22-4cf7-a1cf-02ce687f8436 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] [instance: 4635c03b-1415-4ad9-8825-8997c68ad9e7] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1300.846960] env[68194]: DEBUG oslo_concurrency.lockutils [None req-da20857b-6d94-4d17-9402-da6a623e2957 tempest-ServerRescueNegativeTestJSON-1253406653 tempest-ServerRescueNegativeTestJSON-1253406653-project-member] Lock "3f7dc638-fa6b-40b0-90dc-b4ddf0b2b99b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.967s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.853102] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b68fc678-ad22-4cf7-a1cf-02ce687f8436 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Lock "4635c03b-1415-4ad9-8825-8997c68ad9e7" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 204.783s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1300.855928] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1300.862859] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1300.924341] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1300.927194] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1300.988725] env[68194]: DEBUG nova.network.neutron [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Updating instance_info_cache with network_info: [{"id": "6512e736-8269-4cee-b2c7-43add091486d", "address": "fa:16:3e:42:f1:77", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.90", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6512e736-82", "ovs_interfaceid": "6512e736-8269-4cee-b2c7-43add091486d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1300.999595] env[68194]: DEBUG oslo_concurrency.lockutils [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Releasing lock "refresh_cache-108001a3-ff36-475b-a7a5-8e0e197c62a8" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1301.000197] env[68194]: DEBUG nova.compute.manager [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1301.000444] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1301.001112] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-3ca27d29-ddf2-45ac-a864-5b11e66b6a32 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.014865] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-759408f3-cd20-4c12-af0a-ec93d7346926 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.048124] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 108001a3-ff36-475b-a7a5-8e0e197c62a8 could not be found. [ 1301.048366] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1301.051241] env[68194]: INFO nova.compute.manager [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1301.051546] env[68194]: DEBUG oslo.service.loopingcall [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1301.054234] env[68194]: DEBUG nova.compute.manager [-] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1301.054340] env[68194]: DEBUG nova.network.neutron [-] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1301.103400] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-393ee94b-471a-4244-b50d-56a244f58408 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.111667] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46f7100c-25dc-4e1a-97c2-478206c6932d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.143279] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf03a751-1a03-4709-90f5-8f8ba271378e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.151414] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ac6c599-a563-4f89-896b-06448f998f67 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.165649] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.167084] env[68194]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1301.167328] env[68194]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1301.167981] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-dd0dd2a1-ad29-45e2-8082-940918755e49'] [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1301.167981] env[68194]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1301.167981] env[68194]: ERROR oslo.service.loopingcall [ 1301.169117] env[68194]: ERROR nova.compute.manager [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
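Editor's note: every traceback in this section funnels through the same "wrapper" in /opt/stack/nova/nova/network/neutron.py (lines 196, 204 and 212 in this run). It decorates the neutronclient calls and translates a Neutron 401 into a Nova-level exception: nova.exception.Unauthorized when the rejected token belongs to the user, and nova.exception.NeutronAdminCredentialConfigurationInvalid when the rejected token is the one Nova built from its own [neutron] credentials. That is why the same 401 appears earlier as "Not authorized." for the build request and here as the admin-credential error during network deallocation. The following is a minimal, self-contained sketch of that translation pattern with stand-in exception classes and hypothetical names; it is an illustration, not the actual Nova source.

    # Minimal sketch (hypothetical names) of the exception-translation decorator
    # the tracebacks above go through: a neutronclient 401 is re-raised either as
    # a plain Unauthorized (user token) or as
    # NeutronAdminCredentialConfigurationInvalid (Nova's own admin token),
    # matching neutron.py lines 204 and 212 in this log.
    import functools


    class NeutronUnauthorized(Exception):
        """Stand-in for neutronclient.common.exceptions.Unauthorized."""


    class NovaUnauthorized(Exception):
        """Stand-in for nova.exception.Unauthorized."""


    class NeutronAdminCredentialConfigurationInvalid(Exception):
        """Stand-in for the nova.exception class of the same name."""


    def translate_neutron_exceptions(func, admin=False):
        """Wrap a neutronclient call and map its 401 to a Nova-level exception."""
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            try:
                return func(*args, **kwargs)
            except NeutronUnauthorized:
                if admin:
                    # The token built from Nova's own [neutron] credentials was
                    # rejected: a configuration problem, not a user error.
                    raise NeutronAdminCredentialConfigurationInvalid()
                # The end user's token was rejected: ordinary 401 to the caller.
                raise NovaUnauthorized()
        return wrapper


    # deallocate_for_instance() runs with Nova's admin client, so a 401 from
    # list_ports() surfaces as NeutronAdminCredentialConfigurationInvalid,
    # exactly as in the loopingcall traceback above.
    def list_ports(**search_opts):
        raise NeutronUnauthorized()


    if __name__ == "__main__":
        admin_list_ports = translate_neutron_exceptions(list_ports, admin=True)
        try:
            admin_list_ports(device_id="108001a3-ff36-475b-a7a5-8e0e197c62a8")
        except NeutronAdminCredentialConfigurationInvalid as exc:
            print("translated:", type(exc).__name__)

In Nova itself this role is played by a client-wrapper object that proxies the neutronclient and carries an admin flag for clients created from the service credentials; the sketch above only reproduces the translation behaviour visible in these tracebacks.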
[ 1301.174893] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1301.187416] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1301.187532] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.641s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1301.187871] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.264s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1301.189287] env[68194]: INFO nova.compute.claims [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1301.214417] env[68194]: ERROR nova.compute.manager [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] exception_handler_v20(status_code, error_body) [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise client_exc(message=error_message, [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Neutron server returns request_ids: ['req-dd0dd2a1-ad29-45e2-8082-940918755e49'] [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] During handling of the above exception, another exception occurred: [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Traceback (most recent call last): [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._delete_instance(context, instance, bdms) [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._shutdown_instance(context, instance, bdms) [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._try_deallocate_network(context, instance, requested_networks) [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] with excutils.save_and_reraise_exception(): [ 1301.214417] env[68194]: ERROR 
nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self.force_reraise() [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise self.value [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] _deallocate_network_with_retries() [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return evt.wait() [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] result = hub.switch() [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.greenlet.switch() [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] result = func(*self.args, **self.kw) [ 1301.214417] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] result = f(*args, **kwargs) [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._deallocate_network( [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self.network_api.deallocate_for_instance( [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 
108001a3-ff36-475b-a7a5-8e0e197c62a8] data = neutron.list_ports(**search_opts) [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.list('ports', self.ports_path, retrieve_all, [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] for r in self._pagination(collection, path, **params): [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] res = self.get(path, params=params) [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.retry_request("GET", action, body=body, [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] return self.do_request(method, action, body=body, [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] ret = obj(*args, **kwargs) [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] self._handle_fault_response(status_code, replybody, resp) [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1301.215288] env[68194]: ERROR nova.compute.manager [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] [ 1301.243877] env[68194]: DEBUG oslo_concurrency.lockutils [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.582s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1301.246164] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 89.780s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1301.246164] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] During sync_power_state the instance has a pending task (deleting). Skip. [ 1301.246164] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "108001a3-ff36-475b-a7a5-8e0e197c62a8" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1301.298009] env[68194]: INFO nova.compute.manager [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] [instance: 108001a3-ff36-475b-a7a5-8e0e197c62a8] Successfully reverted task state from None on failure for instance. [ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server [None req-919b89e3-89d4-44fa-a87d-bdc8b46cdbf5 tempest-ServerDiagnosticsNegativeTest-87403121 tempest-ServerDiagnosticsNegativeTest-87403121-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message,
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}}
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-dd0dd2a1-ad29-45e2-8082-940918755e49']
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred:
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last):
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server raise self.value
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server raise self.value
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server raise self.value
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs)
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance
[ 1301.303248] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server raise self.value
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception():
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise()
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server raise self.value
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries()
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server return evt.wait()
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server result = hub.switch()
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server return self.greenlet.switch()
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server self._deallocate_network(
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance(
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all,
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params):
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body,
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body,
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper
[ 1301.304483] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs)
[ 1301.305664] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request
[ 1301.305664] env[68194]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp)
[ 1301.305664] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper
[ 1301.305664] env[68194]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid()
[ 1301.305664] env[68194]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception.
[ 1301.305664] env[68194]: ERROR oslo_messaging.rpc.server [ 1301.449931] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ad022a83-be8a-4f54-9158-74069fecaaa2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.457900] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77e7132-3cd2-4193-a074-e1a2d93f1d29 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.487293] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ee760768-4ed1-4abf-9b07-c92e6890f724 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.494510] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e5ecf8a-e3ca-4fbf-9c11-09b3be3b5691 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.508909] env[68194]: DEBUG nova.compute.provider_tree [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.518928] env[68194]: DEBUG nova.scheduler.client.report [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1301.533779] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.346s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1301.534261] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1301.536444] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.609s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1301.537968] env[68194]: INFO nova.compute.claims [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1301.564719] env[68194]: DEBUG nova.compute.utils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1301.565880] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1301.566064] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1301.574258] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1301.636307] env[68194]: DEBUG nova.policy [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '558d53533dd4415a9c3c66f01d3ce6d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0fb97649d1045689a80d83477a6f25e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1301.638981] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1301.663969] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1301.664141] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1301.664373] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1301.664564] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1301.664710] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1301.664853] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1301.665081] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1301.665247] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1301.665417] env[68194]: DEBUG nova.virt.hardware [None 
req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1301.665579] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1301.665752] env[68194]: DEBUG nova.virt.hardware [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1301.666607] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36ee2cf8-9e80-47eb-9891-a63cc82f4da6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.676972] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78b66af4-216e-423c-a7f4-7a98e7d8ad13 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.799268] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7617d78e-bd15-4da5-91d7-a793d97f8ee6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.806525] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b13b8ee9-610e-4d91-b883-03b82186d62c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.845199] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e1ae43d8-3323-4b9b-a800-8d8816134330 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.852777] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-23dec36a-63b4-4b0b-ab66-ec4779ee0b6a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1301.867222] env[68194]: DEBUG nova.compute.provider_tree [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1301.877017] env[68194]: DEBUG nova.scheduler.client.report [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 
94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1301.892931] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.356s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1301.893439] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1301.925027] env[68194]: DEBUG nova.compute.utils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1301.926815] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1301.926987] env[68194]: DEBUG nova.network.neutron [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1301.935422] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1302.007483] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1302.014678] env[68194]: DEBUG nova.policy [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '9cf127b4e10b49ccafbd589fb9a3457e', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '15473576b8384b0ebf735cc593dbe65d', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1302.027467] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1302.027711] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1302.027871] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1302.028062] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1302.028357] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1302.028419] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1302.028586] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 
tempest-ServersTestJSON-440377855-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1302.028750] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1302.028917] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1302.029093] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1302.029266] env[68194]: DEBUG nova.virt.hardware [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1302.030314] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34597e6e-a851-433b-ba66-abcacfc97728 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.038771] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bca8871d-28a6-4967-bc05-0f70ffafa3e2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1302.176294] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.176481] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1302.176643] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1302.197668] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.197923] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.197959] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.198312] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.198484] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.198618] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.198783] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.198921] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.199064] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.199213] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1302.199347] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1302.199826] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.263868] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Successfully created port: 7fec9dce-9912-42f4-9ac9-7fcde1112591 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1302.435351] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1302.562456] env[68194]: DEBUG nova.network.neutron [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Successfully created port: bc4b0880-3567-4983-a86b-faceec1ad054 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1302.814840] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Successfully created port: 63ec1424-b14d-41ee-90b3-c0c92251a734 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1303.301860] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Successfully created port: 92129502-f64c-4861-90ce-8dad6e6598c1 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1303.401865] env[68194]: DEBUG nova.compute.manager [req-c72da9da-8430-4504-9daa-f0e1362ca096 req-d0f7628d-b809-4ce5-adf3-c5b2a0901562 service nova] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Received event network-vif-plugged-bc4b0880-3567-4983-a86b-faceec1ad054 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1303.402094] env[68194]: DEBUG oslo_concurrency.lockutils [req-c72da9da-8430-4504-9daa-f0e1362ca096 req-d0f7628d-b809-4ce5-adf3-c5b2a0901562 service nova] Acquiring lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1303.402306] env[68194]: DEBUG oslo_concurrency.lockutils [req-c72da9da-8430-4504-9daa-f0e1362ca096 req-d0f7628d-b809-4ce5-adf3-c5b2a0901562 service nova] Lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1303.402514] env[68194]: DEBUG oslo_concurrency.lockutils [req-c72da9da-8430-4504-9daa-f0e1362ca096 req-d0f7628d-b809-4ce5-adf3-c5b2a0901562 service nova] 
Lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1303.402639] env[68194]: DEBUG nova.compute.manager [req-c72da9da-8430-4504-9daa-f0e1362ca096 req-d0f7628d-b809-4ce5-adf3-c5b2a0901562 service nova] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] No waiting events found dispatching network-vif-plugged-bc4b0880-3567-4983-a86b-faceec1ad054 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1303.402802] env[68194]: WARNING nova.compute.manager [req-c72da9da-8430-4504-9daa-f0e1362ca096 req-d0f7628d-b809-4ce5-adf3-c5b2a0901562 service nova] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Received unexpected event network-vif-plugged-bc4b0880-3567-4983-a86b-faceec1ad054 for instance with vm_state building and task_state spawning. [ 1303.594659] env[68194]: DEBUG nova.network.neutron [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Successfully updated port: bc4b0880-3567-4983-a86b-faceec1ad054 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1303.611714] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquiring lock "refresh_cache-4fb56c2b-1556-479e-9d4e-136a8d1d15ad" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1303.611873] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquired lock "refresh_cache-4fb56c2b-1556-479e-9d4e-136a8d1d15ad" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1303.612341] env[68194]: DEBUG nova.network.neutron [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1303.697759] env[68194]: DEBUG nova.network.neutron [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1303.915135] env[68194]: DEBUG nova.network.neutron [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Updating instance_info_cache with network_info: [{"id": "bc4b0880-3567-4983-a86b-faceec1ad054", "address": "fa:16:3e:9e:f6:a1", "network": {"id": "61e93d57-1da9-49f8-9fa8-83975be92955", "bridge": "br-int", "label": "tempest-ServersTestJSON-2110202855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15473576b8384b0ebf735cc593dbe65d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc4b0880-35", "ovs_interfaceid": "bc4b0880-3567-4983-a86b-faceec1ad054", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1303.930044] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Releasing lock "refresh_cache-4fb56c2b-1556-479e-9d4e-136a8d1d15ad" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1303.930044] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Instance network_info: |[{"id": "bc4b0880-3567-4983-a86b-faceec1ad054", "address": "fa:16:3e:9e:f6:a1", "network": {"id": "61e93d57-1da9-49f8-9fa8-83975be92955", "bridge": "br-int", "label": "tempest-ServersTestJSON-2110202855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15473576b8384b0ebf735cc593dbe65d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc4b0880-35", "ovs_interfaceid": "bc4b0880-3567-4983-a86b-faceec1ad054", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1303.930176] env[68194]: 
DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9e:f6:a1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '19440099-773e-4a31-b82e-84a4daa5d8fe', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bc4b0880-3567-4983-a86b-faceec1ad054', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1303.941336] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Creating folder: Project (15473576b8384b0ebf735cc593dbe65d). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1303.941336] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-7aa77b17-5809-4c0e-a96c-cc8a768e3840 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.951899] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Created folder: Project (15473576b8384b0ebf735cc593dbe65d) in parent group-v692426. [ 1303.952111] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Creating folder: Instances. Parent ref: group-v692506. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1303.952342] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-b1bf2b07-629c-49e7-a7da-68d29a0ccbf3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.962412] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Created folder: Instances in parent group-v692506. [ 1303.962675] env[68194]: DEBUG oslo.service.loopingcall [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1303.962861] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1303.963085] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-ba3a15ce-909e-42ed-92a8-17ac139bc7cf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1303.986347] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1303.986347] env[68194]: value = "task-3466878" [ 1303.986347] env[68194]: _type = "Task" [ 1303.986347] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1303.993957] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466878, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.133873] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Successfully updated port: 7fec9dce-9912-42f4-9ac9-7fcde1112591 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1304.496746] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466878, 'name': CreateVM_Task, 'duration_secs': 0.279243} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1304.497088] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1304.497592] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1304.497762] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1304.498182] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1304.498375] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6a5c5ea0-427e-4ea0-a5da-dcc387cdf85e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1304.502634] env[68194]: DEBUG oslo_vmware.api [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Waiting for the task: (returnval){ [ 1304.502634] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]521f0c65-ebef-c598-b6ad-d13f526020b4" [ 1304.502634] env[68194]: _type = "Task" [ 1304.502634] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1304.509833] env[68194]: DEBUG oslo_vmware.api [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]521f0c65-ebef-c598-b6ad-d13f526020b4, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1304.883890] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Successfully updated port: 63ec1424-b14d-41ee-90b3-c0c92251a734 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1305.013540] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1305.013931] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1305.014188] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1305.463151] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Received event network-changed-bc4b0880-3567-4983-a86b-faceec1ad054 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1305.463594] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Refreshing instance network info cache due to event network-changed-bc4b0880-3567-4983-a86b-faceec1ad054. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1305.463933] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Acquiring lock "refresh_cache-4fb56c2b-1556-479e-9d4e-136a8d1d15ad" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1305.464185] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Acquired lock "refresh_cache-4fb56c2b-1556-479e-9d4e-136a8d1d15ad" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1305.464453] env[68194]: DEBUG nova.network.neutron [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Refreshing network info cache for port bc4b0880-3567-4983-a86b-faceec1ad054 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1305.485472] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Successfully updated port: 92129502-f64c-4861-90ce-8dad6e6598c1 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1305.496340] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1305.496469] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquired lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1305.496665] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1305.565485] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1305.900562] env[68194]: DEBUG nova.network.neutron [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Updated VIF entry in instance network info cache for port bc4b0880-3567-4983-a86b-faceec1ad054. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1305.900953] env[68194]: DEBUG nova.network.neutron [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Updating instance_info_cache with network_info: [{"id": "bc4b0880-3567-4983-a86b-faceec1ad054", "address": "fa:16:3e:9e:f6:a1", "network": {"id": "61e93d57-1da9-49f8-9fa8-83975be92955", "bridge": "br-int", "label": "tempest-ServersTestJSON-2110202855-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "15473576b8384b0ebf735cc593dbe65d", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "19440099-773e-4a31-b82e-84a4daa5d8fe", "external-id": "nsx-vlan-transportzone-752", "segmentation_id": 752, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbc4b0880-35", "ovs_interfaceid": "bc4b0880-3567-4983-a86b-faceec1ad054", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1305.916137] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Releasing lock "refresh_cache-4fb56c2b-1556-479e-9d4e-136a8d1d15ad" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1305.916418] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Received event network-vif-plugged-7fec9dce-9912-42f4-9ac9-7fcde1112591 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1305.916611] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Acquiring lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1305.916810] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1305.916972] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1305.917152] env[68194]: DEBUG 
nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] No waiting events found dispatching network-vif-plugged-7fec9dce-9912-42f4-9ac9-7fcde1112591 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1305.917321] env[68194]: WARNING nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Received unexpected event network-vif-plugged-7fec9dce-9912-42f4-9ac9-7fcde1112591 for instance with vm_state building and task_state spawning. [ 1305.917481] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Received event network-changed-7fec9dce-9912-42f4-9ac9-7fcde1112591 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1305.917633] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Refreshing instance network info cache due to event network-changed-7fec9dce-9912-42f4-9ac9-7fcde1112591. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1305.917812] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Acquiring lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1306.181277] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Updating instance_info_cache with network_info: [{"id": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "address": "fa:16:3e:83:9d:82", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fec9dce-99", "ovs_interfaceid": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "63ec1424-b14d-41ee-90b3-c0c92251a734", "address": "fa:16:3e:c2:08:ab", "network": {"id": "20e44071-dc82-4316-8180-0c3defaafabd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1730084084", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 
4, "meta": {}}, "ips": [{"address": "192.168.129.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ec1424-b1", "ovs_interfaceid": "63ec1424-b14d-41ee-90b3-c0c92251a734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92129502-f64c-4861-90ce-8dad6e6598c1", "address": "fa:16:3e:11:78:72", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92129502-f6", "ovs_interfaceid": "92129502-f64c-4861-90ce-8dad6e6598c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.196126] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Releasing lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1306.196494] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Instance network_info: |[{"id": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "address": "fa:16:3e:83:9d:82", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": 
"ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fec9dce-99", "ovs_interfaceid": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "63ec1424-b14d-41ee-90b3-c0c92251a734", "address": "fa:16:3e:c2:08:ab", "network": {"id": "20e44071-dc82-4316-8180-0c3defaafabd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1730084084", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ec1424-b1", "ovs_interfaceid": "63ec1424-b14d-41ee-90b3-c0c92251a734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92129502-f64c-4861-90ce-8dad6e6598c1", "address": "fa:16:3e:11:78:72", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92129502-f6", "ovs_interfaceid": "92129502-f64c-4861-90ce-8dad6e6598c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1306.196799] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Acquired lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1306.196976] env[68194]: DEBUG nova.network.neutron [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Refreshing network info cache for port 7fec9dce-9912-42f4-9ac9-7fcde1112591 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1306.197991] env[68194]: DEBUG 
nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:83:9d:82', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7fec9dce-9912-42f4-9ac9-7fcde1112591', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:c2:08:ab', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '4c6a4836-66dc-4e43-982b-f8fcd3f9989a', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '63ec1424-b14d-41ee-90b3-c0c92251a734', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:11:78:72', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'ca16b6db-4f74-424c-9d36-925ad82cbdd2', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '92129502-f64c-4861-90ce-8dad6e6598c1', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1306.208551] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Creating folder: Project (a0fb97649d1045689a80d83477a6f25e). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1306.211647] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-ed1099c7-2996-46cf-acc1-61af79979d17 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.221804] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Created folder: Project (a0fb97649d1045689a80d83477a6f25e) in parent group-v692426. [ 1306.221983] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Creating folder: Instances. Parent ref: group-v692509. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1306.222216] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-12535993-af71-4c56-9fb2-4347ff22079b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.230981] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Created folder: Instances in parent group-v692509. [ 1306.231213] env[68194]: DEBUG oslo.service.loopingcall [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1306.231386] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1306.231571] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-f31cbc62-44d2-419d-8a6d-cfb3a68cd099 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.256510] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1306.256510] env[68194]: value = "task-3466881" [ 1306.256510] env[68194]: _type = "Task" [ 1306.256510] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.264942] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466881, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1306.456662] env[68194]: DEBUG nova.network.neutron [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Updated VIF entry in instance network info cache for port 7fec9dce-9912-42f4-9ac9-7fcde1112591. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1306.457211] env[68194]: DEBUG nova.network.neutron [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Updating instance_info_cache with network_info: [{"id": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "address": "fa:16:3e:83:9d:82", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fec9dce-99", "ovs_interfaceid": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "63ec1424-b14d-41ee-90b3-c0c92251a734", "address": "fa:16:3e:c2:08:ab", "network": {"id": "20e44071-dc82-4316-8180-0c3defaafabd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1730084084", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 
8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ec1424-b1", "ovs_interfaceid": "63ec1424-b14d-41ee-90b3-c0c92251a734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92129502-f64c-4861-90ce-8dad6e6598c1", "address": "fa:16:3e:11:78:72", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92129502-f6", "ovs_interfaceid": "92129502-f64c-4861-90ce-8dad6e6598c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.467160] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Releasing lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1306.467452] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Received event network-vif-plugged-63ec1424-b14d-41ee-90b3-c0c92251a734 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1306.467658] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Acquiring lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1306.467878] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1306.468070] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1306.468267] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] No waiting events found dispatching network-vif-plugged-63ec1424-b14d-41ee-90b3-c0c92251a734 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1306.468471] env[68194]: WARNING nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Received unexpected event network-vif-plugged-63ec1424-b14d-41ee-90b3-c0c92251a734 for instance with vm_state building and task_state spawning. [ 1306.468639] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Received event network-changed-63ec1424-b14d-41ee-90b3-c0c92251a734 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1306.468808] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Refreshing instance network info cache due to event network-changed-63ec1424-b14d-41ee-90b3-c0c92251a734. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1306.469017] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Acquiring lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1306.469177] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Acquired lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1306.469366] env[68194]: DEBUG nova.network.neutron [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Refreshing network info cache for port 63ec1424-b14d-41ee-90b3-c0c92251a734 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1306.719920] env[68194]: DEBUG nova.network.neutron [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Updated VIF entry in instance network info cache for port 63ec1424-b14d-41ee-90b3-c0c92251a734. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1306.720405] env[68194]: DEBUG nova.network.neutron [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Updating instance_info_cache with network_info: [{"id": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "address": "fa:16:3e:83:9d:82", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fec9dce-99", "ovs_interfaceid": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "63ec1424-b14d-41ee-90b3-c0c92251a734", "address": "fa:16:3e:c2:08:ab", "network": {"id": "20e44071-dc82-4316-8180-0c3defaafabd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1730084084", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ec1424-b1", "ovs_interfaceid": "63ec1424-b14d-41ee-90b3-c0c92251a734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92129502-f64c-4861-90ce-8dad6e6598c1", "address": "fa:16:3e:11:78:72", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", 
"segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92129502-f6", "ovs_interfaceid": "92129502-f64c-4861-90ce-8dad6e6598c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1306.729803] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Releasing lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1306.730061] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Received event network-vif-plugged-92129502-f64c-4861-90ce-8dad6e6598c1 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1306.730259] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Acquiring lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1306.730464] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1306.730628] env[68194]: DEBUG oslo_concurrency.lockutils [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1306.730796] env[68194]: DEBUG nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] No waiting events found dispatching network-vif-plugged-92129502-f64c-4861-90ce-8dad6e6598c1 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1306.730963] env[68194]: WARNING nova.compute.manager [req-0d2def64-53fd-4aa2-bca2-20bda2d6e558 req-e514d3f3-6484-4aba-9bf0-f1cb74fd1f5a service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Received unexpected event network-vif-plugged-92129502-f64c-4861-90ce-8dad6e6598c1 for instance with vm_state building and task_state spawning. [ 1306.766705] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466881, 'name': CreateVM_Task, 'duration_secs': 0.394069} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1306.766841] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1306.767686] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1306.767845] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1306.768178] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1306.768414] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-290d93b8-f253-4953-9c96-1eea73bdf310 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1306.772634] env[68194]: DEBUG oslo_vmware.api [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for the task: (returnval){ [ 1306.772634] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52347fd1-547f-5168-63e5-d02bf9adf02a" [ 1306.772634] env[68194]: _type = "Task" [ 1306.772634] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1306.779795] env[68194]: DEBUG oslo_vmware.api [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52347fd1-547f-5168-63e5-d02bf9adf02a, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1307.282996] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1307.283290] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1307.283507] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1307.491538] env[68194]: DEBUG nova.compute.manager [req-f3e777d4-58c0-4ff1-a239-ea9ca254feb5 req-172d0496-37d0-456f-894b-21887cb384e7 service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Received event network-changed-92129502-f64c-4861-90ce-8dad6e6598c1 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1307.491742] env[68194]: DEBUG nova.compute.manager [req-f3e777d4-58c0-4ff1-a239-ea9ca254feb5 req-172d0496-37d0-456f-894b-21887cb384e7 service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Refreshing instance network info cache due to event network-changed-92129502-f64c-4861-90ce-8dad6e6598c1. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1307.491960] env[68194]: DEBUG oslo_concurrency.lockutils [req-f3e777d4-58c0-4ff1-a239-ea9ca254feb5 req-172d0496-37d0-456f-894b-21887cb384e7 service nova] Acquiring lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1307.492117] env[68194]: DEBUG oslo_concurrency.lockutils [req-f3e777d4-58c0-4ff1-a239-ea9ca254feb5 req-172d0496-37d0-456f-894b-21887cb384e7 service nova] Acquired lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1307.492280] env[68194]: DEBUG nova.network.neutron [req-f3e777d4-58c0-4ff1-a239-ea9ca254feb5 req-172d0496-37d0-456f-894b-21887cb384e7 service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Refreshing network info cache for port 92129502-f64c-4861-90ce-8dad6e6598c1 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1307.733734] env[68194]: DEBUG nova.network.neutron [req-f3e777d4-58c0-4ff1-a239-ea9ca254feb5 req-172d0496-37d0-456f-894b-21887cb384e7 service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Updated VIF entry in instance network info cache for port 92129502-f64c-4861-90ce-8dad6e6598c1. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1307.734242] env[68194]: DEBUG nova.network.neutron [req-f3e777d4-58c0-4ff1-a239-ea9ca254feb5 req-172d0496-37d0-456f-894b-21887cb384e7 service nova] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Updating instance_info_cache with network_info: [{"id": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "address": "fa:16:3e:83:9d:82", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.134", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", "segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7fec9dce-99", "ovs_interfaceid": "7fec9dce-9912-42f4-9ac9-7fcde1112591", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "63ec1424-b14d-41ee-90b3-c0c92251a734", "address": "fa:16:3e:c2:08:ab", "network": {"id": "20e44071-dc82-4316-8180-0c3defaafabd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1730084084", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.55", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "4c6a4836-66dc-4e43-982b-f8fcd3f9989a", "external-id": "nsx-vlan-transportzone-635", "segmentation_id": 635, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap63ec1424-b1", "ovs_interfaceid": "63ec1424-b14d-41ee-90b3-c0c92251a734", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "92129502-f64c-4861-90ce-8dad6e6598c1", "address": "fa:16:3e:11:78:72", "network": {"id": "eb190768-5530-48fd-b629-d10db9a0a8ea", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-79267755", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.26", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "ca16b6db-4f74-424c-9d36-925ad82cbdd2", "external-id": "nsx-vlan-transportzone-200", 
"segmentation_id": 200, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap92129502-f6", "ovs_interfaceid": "92129502-f64c-4861-90ce-8dad6e6598c1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1307.744322] env[68194]: DEBUG oslo_concurrency.lockutils [req-f3e777d4-58c0-4ff1-a239-ea9ca254feb5 req-172d0496-37d0-456f-894b-21887cb384e7 service nova] Releasing lock "refresh_cache-5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1308.829851] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "95be4f59-e835-4389-93ae-9814e97f8ef4" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1308.830144] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1347.642113] env[68194]: WARNING oslo_vmware.rw_handles [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1347.642113] env[68194]: ERROR oslo_vmware.rw_handles [ 1347.642680] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to 
vmware_temp/51e5411b-975c-4580-b8e9-7d502fb52d60/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1347.645378] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1347.645638] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Copying Virtual Disk [datastore1] vmware_temp/51e5411b-975c-4580-b8e9-7d502fb52d60/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/51e5411b-975c-4580-b8e9-7d502fb52d60/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1347.645939] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-89fccd34-1aa1-40e9-8804-3e81571caf37 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1347.655186] env[68194]: DEBUG oslo_vmware.api [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Waiting for the task: (returnval){ [ 1347.655186] env[68194]: value = "task-3466882" [ 1347.655186] env[68194]: _type = "Task" [ 1347.655186] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1347.664170] env[68194]: DEBUG oslo_vmware.api [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Task: {'id': task-3466882, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.166880] env[68194]: DEBUG oslo_vmware.exceptions [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1348.166880] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1348.166880] env[68194]: ERROR nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1348.166880] env[68194]: Faults: ['InvalidArgument'] [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Traceback (most recent call last): [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] yield resources [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self.driver.spawn(context, instance, image_meta, [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self._fetch_image_if_missing(context, vi) [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] image_cache(vi, tmp_image_ds_loc) [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] vm_util.copy_virtual_disk( [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] session._wait_for_task(vmdk_copy_task) [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 
157, in _wait_for_task [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return self.wait_for_task(task_ref) [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return evt.wait() [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] result = hub.switch() [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return self.greenlet.switch() [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self.f(*self.args, **self.kw) [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] raise exceptions.translate_fault(task_info.error) [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Faults: ['InvalidArgument'] [ 1348.166880] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] [ 1348.166880] env[68194]: INFO nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Terminating instance [ 1348.167845] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1348.168145] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1348.168277] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d0e3010e-e98d-415e-a144-3823dc3c3fea {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.170592] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "refresh_cache-d2e2cf0b-1028-4df3-9170-dc616a04fdc3" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1348.170750] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquired lock "refresh_cache-d2e2cf0b-1028-4df3-9170-dc616a04fdc3" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1348.170920] env[68194]: DEBUG nova.network.neutron [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1348.177802] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1348.177985] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1348.179209] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9e9bb2c0-846b-48a4-ade5-62d6b2d0e98c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.186474] env[68194]: DEBUG oslo_vmware.api [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Waiting for the task: (returnval){ [ 1348.186474] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5236187d-5dfa-c1dd-4f3c-e3d006543e8a" [ 1348.186474] env[68194]: _type = "Task" [ 1348.186474] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.194088] env[68194]: DEBUG oslo_vmware.api [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5236187d-5dfa-c1dd-4f3c-e3d006543e8a, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.204427] env[68194]: DEBUG nova.network.neutron [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1348.269626] env[68194]: DEBUG nova.network.neutron [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1348.278906] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Releasing lock "refresh_cache-d2e2cf0b-1028-4df3-9170-dc616a04fdc3" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1348.279350] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1348.279553] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1348.280675] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7507e199-ceb6-4c48-9060-5e568096e8fd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.288858] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1348.289100] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b655531-c8e2-4a34-acf2-f0983ffdbdcc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.320325] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1348.320546] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1348.320777] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Deleting the datastore file [datastore1] d2e2cf0b-1028-4df3-9170-dc616a04fdc3 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1348.321042] 
env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-85c5a64d-fd65-4c2e-844d-cf3112f1fd76 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.326964] env[68194]: DEBUG oslo_vmware.api [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Waiting for the task: (returnval){ [ 1348.326964] env[68194]: value = "task-3466884" [ 1348.326964] env[68194]: _type = "Task" [ 1348.326964] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1348.335078] env[68194]: DEBUG oslo_vmware.api [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Task: {'id': task-3466884, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1348.696499] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1348.696777] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Creating directory with path [datastore1] vmware_temp/77cd4318-9da1-4a69-a9a0-1eb64e265e3a/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1348.697026] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-cb899dfe-42af-41cd-bf7f-b679cf558990 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.708935] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Created directory with path [datastore1] vmware_temp/77cd4318-9da1-4a69-a9a0-1eb64e265e3a/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1348.709152] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Fetch image to [datastore1] vmware_temp/77cd4318-9da1-4a69-a9a0-1eb64e265e3a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1348.709327] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/77cd4318-9da1-4a69-a9a0-1eb64e265e3a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) 
_fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1348.710089] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9a800a7-e735-45a8-b3a8-6cdb6ca021fc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.717067] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eddaba7a-c93c-4f30-bf1e-3810fc48f36a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.726097] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b7196453-bb1e-4a8c-9a17-dcac3e0d111c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.757307] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2b75105-120f-4fe6-99ab-a05633817ff3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.762598] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8ae387ad-7ea4-4173-b6fe-9f593584fdaa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1348.782916] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1349.495444] env[68194]: DEBUG oslo_vmware.api [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Task: {'id': task-3466884, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.036369} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1349.495693] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1349.495873] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1349.496073] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1349.496219] env[68194]: INFO nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Took 1.22 seconds to destroy the instance on the hypervisor. [ 1349.496445] env[68194]: DEBUG oslo.service.loopingcall [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1349.496636] env[68194]: DEBUG nova.compute.manager [-] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1349.498722] env[68194]: DEBUG nova.compute.claims [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1349.498899] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1349.499127] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1349.520068] env[68194]: DEBUG oslo_vmware.rw_handles [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/77cd4318-9da1-4a69-a9a0-1eb64e265e3a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1349.578668] env[68194]: DEBUG oslo_vmware.rw_handles [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1349.578742] env[68194]: DEBUG oslo_vmware.rw_handles [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/77cd4318-9da1-4a69-a9a0-1eb64e265e3a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1349.798115] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db0c8826-a494-4bee-a2b0-b000c3298411 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.805646] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-11a2e566-a5e2-4a04-bb1d-1ad9d1f99329 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.834497] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8cf3a05-7b52-428d-bca0-d39d9ca2ebab {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.841510] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3643852b-7df6-498b-b065-a83937c8296d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1349.854305] env[68194]: DEBUG nova.compute.provider_tree [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1349.862340] env[68194]: DEBUG nova.scheduler.client.report [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1349.877206] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.378s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1349.877747] env[68194]: ERROR nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1349.877747] env[68194]: Faults: ['InvalidArgument'] [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Traceback (most recent call last): [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1349.877747] env[68194]: ERROR nova.compute.manager 
[instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self.driver.spawn(context, instance, image_meta, [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self._fetch_image_if_missing(context, vi) [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] image_cache(vi, tmp_image_ds_loc) [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] vm_util.copy_virtual_disk( [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] session._wait_for_task(vmdk_copy_task) [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return self.wait_for_task(task_ref) [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return evt.wait() [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] result = hub.switch() [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return self.greenlet.switch() [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self.f(*self.args, **self.kw) [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] raise exceptions.translate_fault(task_info.error) [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Faults: ['InvalidArgument'] [ 1349.877747] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] [ 1349.879378] env[68194]: DEBUG nova.compute.utils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1349.879865] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Build of instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 was re-scheduled: A specified parameter was not correct: fileType [ 1349.879865] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1349.880257] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1349.880485] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "refresh_cache-d2e2cf0b-1028-4df3-9170-dc616a04fdc3" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1349.880632] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquired lock "refresh_cache-d2e2cf0b-1028-4df3-9170-dc616a04fdc3" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1349.880794] env[68194]: DEBUG nova.network.neutron [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1349.905075] env[68194]: DEBUG nova.network.neutron [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1350.131719] env[68194]: DEBUG nova.network.neutron [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.140864] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Releasing lock "refresh_cache-d2e2cf0b-1028-4df3-9170-dc616a04fdc3" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1350.141192] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1350.141396] env[68194]: DEBUG nova.compute.manager [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Skipping network deallocation for instance since networking was not requested. {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1350.224035] env[68194]: INFO nova.scheduler.client.report [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Deleted allocations for instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 [ 1350.241353] env[68194]: DEBUG oslo_concurrency.lockutils [None req-cc30954b-03a8-4372-8e1a-63b6c1357d17 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 621.523s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1350.242421] env[68194]: DEBUG oslo_concurrency.lockutils [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.653s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1350.242651] env[68194]: DEBUG oslo_concurrency.lockutils [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1350.242861] env[68194]: DEBUG oslo_concurrency.lockutils [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3-events" acquired 
by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1350.243039] env[68194]: DEBUG oslo_concurrency.lockutils [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1350.244869] env[68194]: INFO nova.compute.manager [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Terminating instance [ 1350.246309] env[68194]: DEBUG oslo_concurrency.lockutils [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquiring lock "refresh_cache-d2e2cf0b-1028-4df3-9170-dc616a04fdc3" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1350.246470] env[68194]: DEBUG oslo_concurrency.lockutils [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Acquired lock "refresh_cache-d2e2cf0b-1028-4df3-9170-dc616a04fdc3" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1350.246642] env[68194]: DEBUG nova.network.neutron [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1350.258420] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1350.274015] env[68194]: DEBUG nova.network.neutron [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1350.321336] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1350.321586] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1350.323035] env[68194]: INFO nova.compute.claims [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1350.340622] env[68194]: DEBUG nova.network.neutron [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1350.349683] env[68194]: DEBUG oslo_concurrency.lockutils [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Releasing lock "refresh_cache-d2e2cf0b-1028-4df3-9170-dc616a04fdc3" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1350.350037] env[68194]: DEBUG nova.compute.manager [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1350.350243] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1350.350765] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ea8ba0dc-530e-4b9b-9228-67e94daccb6c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.359808] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-954d7bdb-9ec0-4642-9416-874609294ba9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.389914] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d2e2cf0b-1028-4df3-9170-dc616a04fdc3 could not be found. [ 1350.390211] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1350.390302] env[68194]: INFO nova.compute.manager [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1350.390538] env[68194]: DEBUG oslo.service.loopingcall [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1350.390751] env[68194]: DEBUG nova.compute.manager [-] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1350.390846] env[68194]: DEBUG nova.network.neutron [-] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1350.508067] env[68194]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1350.508198] env[68194]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-d4cc6a3c-666c-4e0b-99d9-8842845c880d'] [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1350.508952] env[68194]: ERROR oslo.service.loopingcall [ 1350.510930] env[68194]: ERROR nova.compute.manager [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1350.548062] env[68194]: ERROR nova.compute.manager [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Traceback (most recent call last): [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] ret = obj(*args, **kwargs) [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] exception_handler_v20(status_code, error_body) [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] raise client_exc(message=error_message, [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Neutron server returns request_ids: ['req-d4cc6a3c-666c-4e0b-99d9-8842845c880d'] [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] During handling of the above exception, another exception occurred: [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Traceback (most recent call last): [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File 
"/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self._delete_instance(context, instance, bdms) [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self._shutdown_instance(context, instance, bdms) [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self._try_deallocate_network(context, instance, requested_networks) [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] with excutils.save_and_reraise_exception(): [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self.force_reraise() [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] raise self.value [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] _deallocate_network_with_retries() [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return evt.wait() [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] result = hub.switch() [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return self.greenlet.switch() [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] result = 
func(*self.args, **self.kw) [ 1350.548062] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] result = f(*args, **kwargs) [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self._deallocate_network( [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self.network_api.deallocate_for_instance( [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] data = neutron.list_ports(**search_opts) [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] ret = obj(*args, **kwargs) [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return self.list('ports', self.ports_path, retrieve_all, [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] ret = obj(*args, **kwargs) [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] for r in self._pagination(collection, path, **params): [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] res = self.get(path, params=params) [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] ret = obj(*args, **kwargs) [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return self.retry_request("GET", action, body=body, [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] ret = obj(*args, **kwargs) [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] return self.do_request(method, action, body=body, [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] ret = obj(*args, **kwargs) [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] self._handle_fault_response(status_code, replybody, resp) [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1350.549313] env[68194]: ERROR nova.compute.manager [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] [ 1350.583362] env[68194]: DEBUG oslo_concurrency.lockutils [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.341s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1350.587041] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 139.119s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1350.587041] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1350.587041] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "d2e2cf0b-1028-4df3-9170-dc616a04fdc3" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1350.649380] env[68194]: INFO nova.compute.manager [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] [instance: d2e2cf0b-1028-4df3-9170-dc616a04fdc3] Successfully reverted task state from None on failure for instance. [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server [None req-83d1cedb-ba97-4743-87ec-75573e2db7a5 tempest-ServerShowV254Test-1087252362 tempest-ServerShowV254Test-1087252362-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-d4cc6a3c-666c-4e0b-99d9-8842845c880d'] [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1350.656890] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1350.658049] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1350.659198] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1350.659198] 
env[68194]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1350.659198] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1350.659198] env[68194]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1350.659198] env[68194]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1350.659198] env[68194]: ERROR oslo_messaging.rpc.server [ 1350.664884] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b822c1dd-4baa-49a9-949b-36c5ec4b58b6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.674612] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5b5707a-4267-4413-b1c1-1c9d078d12a4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.705550] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b942186-fed1-4dee-8936-f4e3935db77c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.713859] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c27de4ca-df2e-4364-8ae6-b6f02f30a5d3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.728029] env[68194]: DEBUG nova.compute.provider_tree [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1350.741330] env[68194]: DEBUG nova.scheduler.client.report [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1350.758403] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.437s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1350.758992] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1350.792450] env[68194]: DEBUG nova.compute.utils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1350.794366] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1350.794505] env[68194]: DEBUG nova.network.neutron [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1350.802722] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1350.867947] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1350.871382] env[68194]: DEBUG nova.policy [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '40d0d57e63ee4465a710645a5bf18f45', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '12f72224a89d492b9933aea4e509bb7b', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1350.888818] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1350.889086] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1350.889251] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1350.889436] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1350.889610] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1350.889776] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1350.889987] env[68194]: DEBUG nova.virt.hardware [None 
req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1350.890264] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1350.890446] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1350.890616] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1350.890789] env[68194]: DEBUG nova.virt.hardware [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1350.891833] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5d50e4b9-2272-43a5-a9b2-c5a2c076f890 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1350.899726] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-756e5c11-6954-48f0-a4b0-c25aabc600d0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1351.237959] env[68194]: DEBUG nova.network.neutron [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Successfully created port: 605e01da-121b-4d80-a90f-f90286c8dd66 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1351.805826] env[68194]: DEBUG nova.compute.manager [req-597efae5-e353-460d-b4ba-e72b1f3003c5 req-7ef82c79-71d8-44c3-978c-86005a992454 service nova] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Received event network-vif-plugged-605e01da-121b-4d80-a90f-f90286c8dd66 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1351.806111] env[68194]: DEBUG oslo_concurrency.lockutils [req-597efae5-e353-460d-b4ba-e72b1f3003c5 req-7ef82c79-71d8-44c3-978c-86005a992454 service nova] Acquiring lock "bf9766c7-1495-4edd-92bd-06a0d036855e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1351.806248] env[68194]: DEBUG oslo_concurrency.lockutils [req-597efae5-e353-460d-b4ba-e72b1f3003c5 req-7ef82c79-71d8-44c3-978c-86005a992454 service nova] Lock 
"bf9766c7-1495-4edd-92bd-06a0d036855e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1351.806455] env[68194]: DEBUG oslo_concurrency.lockutils [req-597efae5-e353-460d-b4ba-e72b1f3003c5 req-7ef82c79-71d8-44c3-978c-86005a992454 service nova] Lock "bf9766c7-1495-4edd-92bd-06a0d036855e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1351.806778] env[68194]: DEBUG nova.compute.manager [req-597efae5-e353-460d-b4ba-e72b1f3003c5 req-7ef82c79-71d8-44c3-978c-86005a992454 service nova] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] No waiting events found dispatching network-vif-plugged-605e01da-121b-4d80-a90f-f90286c8dd66 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1351.806901] env[68194]: WARNING nova.compute.manager [req-597efae5-e353-460d-b4ba-e72b1f3003c5 req-7ef82c79-71d8-44c3-978c-86005a992454 service nova] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Received unexpected event network-vif-plugged-605e01da-121b-4d80-a90f-f90286c8dd66 for instance with vm_state building and task_state spawning. [ 1351.883981] env[68194]: DEBUG nova.network.neutron [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Successfully updated port: 605e01da-121b-4d80-a90f-f90286c8dd66 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1351.895159] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquiring lock "refresh_cache-bf9766c7-1495-4edd-92bd-06a0d036855e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1351.895369] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquired lock "refresh_cache-bf9766c7-1495-4edd-92bd-06a0d036855e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1351.895557] env[68194]: DEBUG nova.network.neutron [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1351.956466] env[68194]: DEBUG nova.network.neutron [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1352.161334] env[68194]: DEBUG nova.network.neutron [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Updating instance_info_cache with network_info: [{"id": "605e01da-121b-4d80-a90f-f90286c8dd66", "address": "fa:16:3e:39:0c:c2", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap605e01da-12", "ovs_interfaceid": "605e01da-121b-4d80-a90f-f90286c8dd66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1352.172653] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Releasing lock "refresh_cache-bf9766c7-1495-4edd-92bd-06a0d036855e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1352.172961] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Instance network_info: |[{"id": "605e01da-121b-4d80-a90f-f90286c8dd66", "address": "fa:16:3e:39:0c:c2", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap605e01da-12", "ovs_interfaceid": "605e01da-121b-4d80-a90f-f90286c8dd66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1352.173365] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None 
req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:39:0c:c2', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '605e01da-121b-4d80-a90f-f90286c8dd66', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1352.180842] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Creating folder: Project (12f72224a89d492b9933aea4e509bb7b). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1352.181969] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cb9a5c29-630b-427e-9c1f-bdf38a01f175 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.191826] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Created folder: Project (12f72224a89d492b9933aea4e509bb7b) in parent group-v692426. [ 1352.192017] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Creating folder: Instances. Parent ref: group-v692512. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1352.192246] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1277fde9-9f5e-462d-8200-82b774013fb9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.200749] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Created folder: Instances in parent group-v692512. [ 1352.200995] env[68194]: DEBUG oslo.service.loopingcall [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1352.201195] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1352.201385] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-706571d0-c206-4f54-98e7-23c905a64e4a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.220350] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1352.220350] env[68194]: value = "task-3466887" [ 1352.220350] env[68194]: _type = "Task" [ 1352.220350] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.227553] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466887, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.730335] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466887, 'name': CreateVM_Task, 'duration_secs': 0.307464} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1352.730574] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1352.731339] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1352.731550] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1352.731898] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1352.732215] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-cf1664a3-3814-45e8-a21a-17dd4051c863 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1352.736900] env[68194]: DEBUG oslo_vmware.api [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Waiting for the task: (returnval){ [ 1352.736900] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52aab95b-ced6-5661-f7c4-c33013444695" [ 1352.736900] env[68194]: _type = "Task" [ 1352.736900] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1352.744462] env[68194]: DEBUG oslo_vmware.api [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52aab95b-ced6-5661-f7c4-c33013444695, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1352.905761] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "73abf0ba-016c-4536-afd3-f6c6960045fc" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1352.906038] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "73abf0ba-016c-4536-afd3-f6c6960045fc" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1353.246932] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1353.246932] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1353.246932] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1353.901577] env[68194]: DEBUG nova.compute.manager [req-5081de42-026d-48d3-99ba-c4afb5a1e493 req-7d06f3d4-651f-42a8-a881-a0412ffbb8b4 service nova] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Received event network-changed-605e01da-121b-4d80-a90f-f90286c8dd66 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1353.901784] env[68194]: DEBUG nova.compute.manager [req-5081de42-026d-48d3-99ba-c4afb5a1e493 req-7d06f3d4-651f-42a8-a881-a0412ffbb8b4 service nova] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Refreshing instance network info cache due to event network-changed-605e01da-121b-4d80-a90f-f90286c8dd66. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1353.902019] env[68194]: DEBUG oslo_concurrency.lockutils [req-5081de42-026d-48d3-99ba-c4afb5a1e493 req-7d06f3d4-651f-42a8-a881-a0412ffbb8b4 service nova] Acquiring lock "refresh_cache-bf9766c7-1495-4edd-92bd-06a0d036855e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1353.902163] env[68194]: DEBUG oslo_concurrency.lockutils [req-5081de42-026d-48d3-99ba-c4afb5a1e493 req-7d06f3d4-651f-42a8-a881-a0412ffbb8b4 service nova] Acquired lock "refresh_cache-bf9766c7-1495-4edd-92bd-06a0d036855e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1353.902325] env[68194]: DEBUG nova.network.neutron [req-5081de42-026d-48d3-99ba-c4afb5a1e493 req-7d06f3d4-651f-42a8-a881-a0412ffbb8b4 service nova] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Refreshing network info cache for port 605e01da-121b-4d80-a90f-f90286c8dd66 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1354.153675] env[68194]: DEBUG nova.network.neutron [req-5081de42-026d-48d3-99ba-c4afb5a1e493 req-7d06f3d4-651f-42a8-a881-a0412ffbb8b4 service nova] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Updated VIF entry in instance network info cache for port 605e01da-121b-4d80-a90f-f90286c8dd66. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1354.154056] env[68194]: DEBUG nova.network.neutron [req-5081de42-026d-48d3-99ba-c4afb5a1e493 req-7d06f3d4-651f-42a8-a881-a0412ffbb8b4 service nova] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Updating instance_info_cache with network_info: [{"id": "605e01da-121b-4d80-a90f-f90286c8dd66", "address": "fa:16:3e:39:0c:c2", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.176", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap605e01da-12", "ovs_interfaceid": "605e01da-121b-4d80-a90f-f90286c8dd66", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1354.163488] env[68194]: DEBUG oslo_concurrency.lockutils [req-5081de42-026d-48d3-99ba-c4afb5a1e493 req-7d06f3d4-651f-42a8-a881-a0412ffbb8b4 service nova] Releasing lock "refresh_cache-bf9766c7-1495-4edd-92bd-06a0d036855e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1354.265916] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquiring lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad" by 
"nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1355.416059] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1355.416354] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1357.417473] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.415906] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.416226] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1359.416346] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1360.416026] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1360.428512] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1360.428787] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1360.428960] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1360.429132] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource 
/opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1360.430256] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f88aa852-fcda-4e7b-bdb0-bc8f6c5bad87 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.438894] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3cd0d26d-baa6-47cc-9e10-080612b8bf9c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.452433] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f83a278-8ea2-448d-8ba8-1640808515af {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.458436] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c49d85ee-4970-47f1-a356-53d7e14f8d85 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.486376] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180938MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1360.486607] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1360.486840] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1360.561228] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ff16d7c1-a601-4ac6-be52-823727c8b843 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.561386] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.561516] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.561640] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.561761] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.561884] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.562010] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.562132] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.562248] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.562361] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1360.572681] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.582346] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ada24904-c85b-4af9-be4c-afc8514b7307 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.591302] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fa0796d0-14e5-4bcc-9571-3193f4c1185e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.600629] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.609154] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 874d08ae-ce38-4a35-bd3f-5c40a2c9bf97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.618914] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fa78516a-fe6f-4770-9def-ebe439e87adc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.627568] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 92ae0029-0d42-4655-9971-6dfbc07df15d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.636111] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.644190] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1360.644402] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1360.644549] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1360.854260] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-646b2692-99b1-44bf-97a2-61061d841e7d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.861909] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2edbc750-974d-41b4-9bdf-b9cc2b458a74 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.891258] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f55e1c62-590c-45ba-b014-951f8105a9f3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.898451] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-53703596-fc7a-48ea-a50d-9ea50b28a670 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1360.911984] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1360.920250] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1360.933915] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) 
_update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1360.934122] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.447s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1361.935171] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.412015] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.415825] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1363.415996] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1363.416144] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1363.438337] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.438499] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.438635] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.438782] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.438912] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.439052] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.439180] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.439303] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.439425] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.439543] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1363.439666] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1397.880052] env[68194]: WARNING oslo_vmware.rw_handles [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1397.880052] env[68194]: ERROR oslo_vmware.rw_handles [ 1397.880052] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/77cd4318-9da1-4a69-a9a0-1eb64e265e3a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1397.881810] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1397.882073] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Copying Virtual Disk [datastore1] vmware_temp/77cd4318-9da1-4a69-a9a0-1eb64e265e3a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/77cd4318-9da1-4a69-a9a0-1eb64e265e3a/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1397.882375] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c30ac2fb-d632-4591-9c05-ce0437e2b559 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1397.890209] env[68194]: DEBUG oslo_vmware.api [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 
tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Waiting for the task: (returnval){ [ 1397.890209] env[68194]: value = "task-3466888" [ 1397.890209] env[68194]: _type = "Task" [ 1397.890209] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1397.898261] env[68194]: DEBUG oslo_vmware.api [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Task: {'id': task-3466888, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.400864] env[68194]: DEBUG oslo_vmware.exceptions [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1398.401163] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1398.401703] env[68194]: ERROR nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1398.401703] env[68194]: Faults: ['InvalidArgument'] [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Traceback (most recent call last): [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] yield resources [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self.driver.spawn(context, instance, image_meta, [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._fetch_image_if_missing(context, vi) [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] image_cache(vi, tmp_image_ds_loc) [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] vm_util.copy_virtual_disk( [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] session._wait_for_task(vmdk_copy_task) [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.wait_for_task(task_ref) [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return evt.wait() [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] result = hub.switch() [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.greenlet.switch() [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self.f(*self.args, **self.kw) [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] raise exceptions.translate_fault(task_info.error) [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Faults: ['InvalidArgument'] [ 1398.401703] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1398.402715] env[68194]: INFO nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: 
ff16d7c1-a601-4ac6-be52-823727c8b843] Terminating instance [ 1398.403636] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1398.403845] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1398.404124] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-bf7daae8-d4c7-408e-a855-d0a135e24001 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.406295] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1398.406488] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1398.407216] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2139e906-cae8-4443-81ea-84bbe8b13478 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.414042] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1398.414277] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f580c180-2c09-4348-a7b7-21f85d1f79c6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.416473] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1398.416650] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1398.417639] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-2cac4993-84e0-47c3-afcd-857a2255bfb0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.422868] env[68194]: DEBUG oslo_vmware.api [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Waiting for the task: (returnval){ [ 1398.422868] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5230418f-bf4b-0d9e-ef7a-80a9bacddfaa" [ 1398.422868] env[68194]: _type = "Task" [ 1398.422868] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.430034] env[68194]: DEBUG oslo_vmware.api [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5230418f-bf4b-0d9e-ef7a-80a9bacddfaa, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.481969] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1398.482208] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1398.482389] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Deleting the datastore file [datastore1] ff16d7c1-a601-4ac6-be52-823727c8b843 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1398.482648] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2a17f841-6f39-46ca-855d-4f3c9f6f2d74 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.488610] env[68194]: DEBUG oslo_vmware.api [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Waiting for the task: (returnval){ [ 1398.488610] env[68194]: value = "task-3466890" [ 1398.488610] env[68194]: _type = "Task" [ 1398.488610] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1398.495872] env[68194]: DEBUG oslo_vmware.api [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Task: {'id': task-3466890, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1398.934165] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1398.934636] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Creating directory with path [datastore1] vmware_temp/e5266bd2-30af-49f8-b487-08f04e0703e8/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1398.934832] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d9493f6e-7f75-4aed-bf8b-1e3f5c54e1f7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.946820] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Created directory with path [datastore1] vmware_temp/e5266bd2-30af-49f8-b487-08f04e0703e8/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1398.947040] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Fetch image to [datastore1] vmware_temp/e5266bd2-30af-49f8-b487-08f04e0703e8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1398.947208] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/e5266bd2-30af-49f8-b487-08f04e0703e8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1398.947974] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5ccdcff5-cf74-4e11-b907-0b12cd2ce70e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.954632] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e77110d4-0a81-4e7e-9242-6cdadfe13bac {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.963623] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2670151-a7db-412f-90e7-0e7b3c6c2c2d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1398.997170] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-273d8edd-1624-4fa4-b505-f7e0467ed094 {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.003882] env[68194]: DEBUG oslo_vmware.api [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Task: {'id': task-3466890, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075554} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1399.005331] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1399.005534] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1399.005707] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1399.005883] env[68194]: INFO nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Took 0.60 seconds to destroy the instance on the hypervisor. 
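The records above repeat the same oslo.vmware task pattern several times: invoke a vSphere task (CopyVirtualDisk_Task, UnregisterVM, DeleteDatastoreFile_Task), then poll its state until it completes or faults, as with the "A specified parameter was not correct: fileType" InvalidArgument failure. The following is only an illustrative sketch of that poll-and-translate loop, not oslo.vmware's implementation; the FakeTask object, the TaskError class, and the poll interval are assumptions made for the example.

```python
# Illustrative sketch only (not oslo.vmware's code): poll a task's info at a
# fixed interval, log progress, and raise when the task ends in an error,
# mirroring the wait_for_task/_poll_task records above.

import time


class TaskError(Exception):
    """Stand-in for oslo_vmware.exceptions.VimFaultException (assumption)."""


class FakeTask:
    """Simulates task info transitions: running -> error or success."""

    def __init__(self, states):
        self._states = iter(states)

    def info(self):
        return next(self._states)


def wait_for_task(task, interval=0.01):
    while True:
        info = task.info()
        if info["state"] == "running":
            print(f"Task {info['id']} progress is {info['progress']}%.")
        elif info["state"] == "success":
            print(f"Task {info['id']} completed successfully.")
            return info
        elif info["state"] == "error":
            # Mirrors "raise exceptions.translate_fault(task_info.error)"
            raise TaskError(f"{info['error']} Faults: {info['faults']}")
        time.sleep(interval)


copy_task = FakeTask([
    {"id": "task-3466888", "state": "running", "progress": 0},
    {"id": "task-3466888", "state": "error",
     "error": "A specified parameter was not correct: fileType",
     "faults": ["InvalidArgument"]},
])

try:
    wait_for_task(copy_task)
except TaskError as exc:
    print("CopyVirtualDisk_Task failed:", exc)
```

In the real service the polling happens inside a looping call on the session, as the tracebacks above show (oslo_vmware/common/loopingcall.py); a plain sleep loop stands in for that here.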
[ 1399.007632] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-01c4613d-698f-4ff4-8c1f-1d8fcc852480 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.009538] env[68194]: DEBUG nova.compute.claims [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1399.009729] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1399.009947] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1399.030591] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1399.082075] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e5266bd2-30af-49f8-b487-08f04e0703e8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1399.141460] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1399.141643] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/e5266bd2-30af-49f8-b487-08f04e0703e8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1399.311728] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61195670-9fdf-4634-92f3-abb509c68d6d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.318889] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2cdbcced-e0b1-4945-8643-063a986b1bc5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.347172] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b353ff27-4210-4b1f-8cc2-040d36aae5f2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.353888] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64a5e6d4-4d25-41d3-9b3f-19d609b19b5c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.366586] env[68194]: DEBUG nova.compute.provider_tree [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1399.375110] env[68194]: DEBUG nova.scheduler.client.report [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1399.408702] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.399s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1399.409289] env[68194]: ERROR nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1399.409289] env[68194]: Faults: ['InvalidArgument'] [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Traceback (most recent call last): [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in 
_build_and_run_instance [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self.driver.spawn(context, instance, image_meta, [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._fetch_image_if_missing(context, vi) [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] image_cache(vi, tmp_image_ds_loc) [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] vm_util.copy_virtual_disk( [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] session._wait_for_task(vmdk_copy_task) [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.wait_for_task(task_ref) [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return evt.wait() [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] result = hub.switch() [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.greenlet.switch() [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self.f(*self.args, **self.kw) [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: 
ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] raise exceptions.translate_fault(task_info.error) [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Faults: ['InvalidArgument'] [ 1399.409289] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1399.410150] env[68194]: DEBUG nova.compute.utils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1399.411571] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Build of instance ff16d7c1-a601-4ac6-be52-823727c8b843 was re-scheduled: A specified parameter was not correct: fileType [ 1399.411571] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1399.411949] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1399.412173] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1399.412295] env[68194]: DEBUG nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1399.412459] env[68194]: DEBUG nova.network.neutron [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1399.514200] env[68194]: DEBUG neutronclient.v2_0.client [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1399.515256] env[68194]: ERROR nova.compute.manager [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Traceback (most recent call last): [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self.driver.spawn(context, instance, image_meta, [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._fetch_image_if_missing(context, vi) [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] image_cache(vi, tmp_image_ds_loc) [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] vm_util.copy_virtual_disk( [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] session._wait_for_task(vmdk_copy_task) [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.wait_for_task(task_ref) [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return evt.wait() [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] result = hub.switch() [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.greenlet.switch() [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self.f(*self.args, **self.kw) [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] raise exceptions.translate_fault(task_info.error) [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Faults: ['InvalidArgument'] [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] During handling of the above exception, another exception occurred: [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Traceback (most recent call last): [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._build_and_run_instance(context, instance, image, [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File 
"/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] raise exception.RescheduledException( [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] nova.exception.RescheduledException: Build of instance ff16d7c1-a601-4ac6-be52-823727c8b843 was re-scheduled: A specified parameter was not correct: fileType [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Faults: ['InvalidArgument'] [ 1399.515256] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] During handling of the above exception, another exception occurred: [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Traceback (most recent call last): [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] exception_handler_v20(status_code, error_body) [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] raise client_exc(message=error_message, [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Neutron server returns request_ids: ['req-dc49dcb7-8f7f-4f84-b464-f276effada00'] [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] During handling of the above exception, another exception occurred: [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Traceback (most recent call last): [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._deallocate_network(context, instance, requested_networks) [ 1399.516388] env[68194]: ERROR nova.compute.manager 
[instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self.network_api.deallocate_for_instance( [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] data = neutron.list_ports(**search_opts) [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.list('ports', self.ports_path, retrieve_all, [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] for r in self._pagination(collection, path, **params): [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] res = self.get(path, params=params) [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.retry_request("GET", action, body=body, [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1399.516388] env[68194]: ERROR nova.compute.manager [instance: 
ff16d7c1-a601-4ac6-be52-823727c8b843] return self.do_request(method, action, body=body, [ 1399.517452] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1399.517452] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1399.517452] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1399.517452] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._handle_fault_response(status_code, replybody, resp) [ 1399.517452] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1399.517452] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] raise exception.Unauthorized() [ 1399.517452] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] nova.exception.Unauthorized: Not authorized. [ 1399.517452] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1399.569172] env[68194]: INFO nova.scheduler.client.report [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Deleted allocations for instance ff16d7c1-a601-4ac6-be52-823727c8b843 [ 1399.589930] env[68194]: DEBUG oslo_concurrency.lockutils [None req-971e1162-cdd2-4066-a25b-20c0cf282055 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 639.886s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1399.591008] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 443.549s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1399.591008] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquiring lock "ff16d7c1-a601-4ac6-be52-823727c8b843-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1399.591008] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1399.591008] 
env[68194]: DEBUG oslo_concurrency.lockutils [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1399.592776] env[68194]: INFO nova.compute.manager [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Terminating instance [ 1399.594275] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquiring lock "refresh_cache-ff16d7c1-a601-4ac6-be52-823727c8b843" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1399.594437] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Acquired lock "refresh_cache-ff16d7c1-a601-4ac6-be52-823727c8b843" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1399.594597] env[68194]: DEBUG nova.network.neutron [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1399.601081] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1399.655011] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1399.655011] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1399.656133] env[68194]: INFO nova.compute.claims [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1399.889666] env[68194]: DEBUG nova.network.neutron [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Updating instance_info_cache with network_info: [{"id": "84edd7ee-51a0-4fc3-ad04-c7c683d5b15f", "address": "fa:16:3e:71:76:7f", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.229", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap84edd7ee-51", "ovs_interfaceid": "84edd7ee-51a0-4fc3-ad04-c7c683d5b15f", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1399.900603] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Releasing lock "refresh_cache-ff16d7c1-a601-4ac6-be52-823727c8b843" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1399.901242] env[68194]: DEBUG nova.compute.manager [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1399.901610] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1399.902745] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-84d82bb0-ed99-4194-8418-66f46e48cb70 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.912735] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8830b5b7-14e3-47c4-ae80-e66ee4460ba0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.945178] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ff16d7c1-a601-4ac6-be52-823727c8b843 could not be found. [ 1399.945791] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1399.946135] env[68194]: INFO nova.compute.manager [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1399.946601] env[68194]: DEBUG oslo.service.loopingcall [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1399.950128] env[68194]: DEBUG nova.compute.manager [-] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1399.950128] env[68194]: DEBUG nova.network.neutron [-] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1399.966309] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1313a77b-3bf2-460e-95c1-5d4c3edb0825 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1399.975043] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1afc3551-ae9d-439a-9306-166c887046bf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.009644] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3b626b3-5b48-4203-b7e7-93271850923a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.018745] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cfc93d9-b1d7-4dbf-b88a-237625ab8a26 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.032674] env[68194]: DEBUG nova.compute.provider_tree [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1400.046020] env[68194]: DEBUG nova.scheduler.client.report [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1400.071156] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.416s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1400.071156] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1400.087311] env[68194]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1400.087888] env[68194]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-2c42b2e3-6416-4d97-b638-e6a82d50029c'] [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall 
self.network_api.deallocate_for_instance( [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1400.088262] env[68194]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1400.088262] env[68194]: ERROR oslo.service.loopingcall [ 1400.089642] env[68194]: ERROR nova.compute.manager [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1400.120469] env[68194]: ERROR nova.compute.manager [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Traceback (most recent call last): [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] exception_handler_v20(status_code, error_body) [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] raise client_exc(message=error_message, [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Neutron server returns request_ids: ['req-2c42b2e3-6416-4d97-b638-e6a82d50029c'] [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] During handling of the above exception, another exception occurred: [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Traceback (most recent call last): [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._delete_instance(context, instance, bdms) [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: 
ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._shutdown_instance(context, instance, bdms) [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._try_deallocate_network(context, instance, requested_networks) [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] with excutils.save_and_reraise_exception(): [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self.force_reraise() [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] raise self.value [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] _deallocate_network_with_retries() [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return evt.wait() [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] result = hub.switch() [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.greenlet.switch() [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] result = func(*self.args, **self.kw) [ 1400.120469] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1400.121680] env[68194]: ERROR nova.compute.manager 
[instance: ff16d7c1-a601-4ac6-be52-823727c8b843] result = f(*args, **kwargs) [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._deallocate_network( [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self.network_api.deallocate_for_instance( [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] data = neutron.list_ports(**search_opts) [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.list('ports', self.ports_path, retrieve_all, [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] for r in self._pagination(collection, path, **params): [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] res = self.get(path, params=params) [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.retry_request("GET", action, body=body, [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File 
"/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] return self.do_request(method, action, body=body, [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] ret = obj(*args, **kwargs) [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] self._handle_fault_response(status_code, replybody, resp) [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1400.121680] env[68194]: ERROR nova.compute.manager [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] [ 1400.124013] env[68194]: DEBUG nova.compute.utils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1400.125315] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1400.125472] env[68194]: DEBUG nova.network.neutron [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1400.136417] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Start building block device mappings for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1400.167255] env[68194]: DEBUG oslo_concurrency.lockutils [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.575s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1400.168091] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 188.701s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1400.168479] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] During sync_power_state the instance has a pending task (spawning). Skip. [ 1400.168788] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "ff16d7c1-a601-4ac6-be52-823727c8b843" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.002s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1400.198691] env[68194]: DEBUG nova.policy [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'e0d619ddcfa442a2a79c5562b1e3d2f4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'ac8fb97945ad44a79d127c2ac56489a1', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1400.223522] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1400.238664] env[68194]: INFO nova.compute.manager [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] [instance: ff16d7c1-a601-4ac6-be52-823727c8b843] Successfully reverted task state from None on failure for instance. [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server [None req-3384e7f7-5b83-4c1a-961d-6bc93d6eb8d9 tempest-ServersAdminNegativeTestJSON-1209826617 tempest-ServersAdminNegativeTestJSON-1209826617-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-2c42b2e3-6416-4d97-b638-e6a82d50029c'] [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1400.242678] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.244174] env[68194]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1400.244174] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1400.245564] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1400.245564] env[68194]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1400.245564] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1400.245564] env[68194]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1400.245564] env[68194]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1400.245564] env[68194]: ERROR oslo_messaging.rpc.server [ 1400.251585] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1400.251816] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1400.251976] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1400.252175] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1400.252328] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1400.252482] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1400.252717] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1400.252891] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1400.253792] env[68194]: DEBUG nova.virt.hardware [None 
req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1400.253792] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1400.253792] env[68194]: DEBUG nova.virt.hardware [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1400.255054] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f2089e4-f836-4b7b-93b1-08e266ea231b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.262893] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90f5f503-1db6-435f-a0c4-9b86309113db {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1400.512062] env[68194]: DEBUG nova.network.neutron [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Successfully created port: a27faca8-661f-418d-9151-6335753967aa {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1401.090020] env[68194]: DEBUG nova.compute.manager [req-ce3f83b3-c6d0-491a-9aa6-41b53b2ef4a3 req-c6a516e6-7e34-4e03-a198-7b7fe43c84f0 service nova] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Received event network-vif-plugged-a27faca8-661f-418d-9151-6335753967aa {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1401.090020] env[68194]: DEBUG oslo_concurrency.lockutils [req-ce3f83b3-c6d0-491a-9aa6-41b53b2ef4a3 req-c6a516e6-7e34-4e03-a198-7b7fe43c84f0 service nova] Acquiring lock "7b430b72-05fa-49a6-8bbb-7c083cb96457-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1401.090020] env[68194]: DEBUG oslo_concurrency.lockutils [req-ce3f83b3-c6d0-491a-9aa6-41b53b2ef4a3 req-c6a516e6-7e34-4e03-a198-7b7fe43c84f0 service nova] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1401.090020] env[68194]: DEBUG oslo_concurrency.lockutils [req-ce3f83b3-c6d0-491a-9aa6-41b53b2ef4a3 req-c6a516e6-7e34-4e03-a198-7b7fe43c84f0 service nova] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1401.090020] env[68194]: DEBUG nova.compute.manager 
[req-ce3f83b3-c6d0-491a-9aa6-41b53b2ef4a3 req-c6a516e6-7e34-4e03-a198-7b7fe43c84f0 service nova] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] No waiting events found dispatching network-vif-plugged-a27faca8-661f-418d-9151-6335753967aa {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1401.090020] env[68194]: WARNING nova.compute.manager [req-ce3f83b3-c6d0-491a-9aa6-41b53b2ef4a3 req-c6a516e6-7e34-4e03-a198-7b7fe43c84f0 service nova] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Received unexpected event network-vif-plugged-a27faca8-661f-418d-9151-6335753967aa for instance with vm_state building and task_state spawning. [ 1401.095585] env[68194]: DEBUG nova.network.neutron [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Successfully updated port: a27faca8-661f-418d-9151-6335753967aa {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1401.111708] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquiring lock "refresh_cache-7b430b72-05fa-49a6-8bbb-7c083cb96457" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1401.111897] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquired lock "refresh_cache-7b430b72-05fa-49a6-8bbb-7c083cb96457" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1401.112029] env[68194]: DEBUG nova.network.neutron [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1401.155466] env[68194]: DEBUG nova.network.neutron [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1401.313487] env[68194]: DEBUG nova.network.neutron [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Updating instance_info_cache with network_info: [{"id": "a27faca8-661f-418d-9151-6335753967aa", "address": "fa:16:3e:28:1e:7f", "network": {"id": "5fd38835-e1ec-487e-a224-216217ed60a6", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-62586306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac8fb97945ad44a79d127c2ac56489a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8480e3f5-68bd-4c27-ae1f-7c994a8202b1", "external-id": "nsx-vlan-transportzone-628", "segmentation_id": 628, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa27faca8-66", "ovs_interfaceid": "a27faca8-661f-418d-9151-6335753967aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1401.323552] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Releasing lock "refresh_cache-7b430b72-05fa-49a6-8bbb-7c083cb96457" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1401.323844] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Instance network_info: |[{"id": "a27faca8-661f-418d-9151-6335753967aa", "address": "fa:16:3e:28:1e:7f", "network": {"id": "5fd38835-e1ec-487e-a224-216217ed60a6", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-62586306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac8fb97945ad44a79d127c2ac56489a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8480e3f5-68bd-4c27-ae1f-7c994a8202b1", "external-id": "nsx-vlan-transportzone-628", "segmentation_id": 628, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa27faca8-66", "ovs_interfaceid": "a27faca8-661f-418d-9151-6335753967aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1401.324247] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:28:1e:7f', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8480e3f5-68bd-4c27-ae1f-7c994a8202b1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a27faca8-661f-418d-9151-6335753967aa', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1401.331858] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Creating folder: Project (ac8fb97945ad44a79d127c2ac56489a1). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1401.332405] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-5d54ab9e-0658-4136-863e-b303ad05feb3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.343617] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Created folder: Project (ac8fb97945ad44a79d127c2ac56489a1) in parent group-v692426. [ 1401.343797] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Creating folder: Instances. Parent ref: group-v692515. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1401.344032] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-183d43b5-4c26-492b-a5f5-ad3523c6000d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.352710] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Created folder: Instances in parent group-v692515. [ 1401.352940] env[68194]: DEBUG oslo.service.loopingcall [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1401.353154] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1401.353332] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-0276b375-c8e3-4928-af07-d2a8e0799906 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.372384] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1401.372384] env[68194]: value = "task-3466893" [ 1401.372384] env[68194]: _type = "Task" [ 1401.372384] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.380215] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466893, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1401.882269] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466893, 'name': CreateVM_Task, 'duration_secs': 0.290064} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1401.882455] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1401.883133] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1401.883326] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1401.883654] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1401.883907] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-39d2bb84-faab-4291-bdd0-f6251ff8d912 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1401.888358] env[68194]: DEBUG oslo_vmware.api [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Waiting for the task: (returnval){ [ 1401.888358] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527f170c-a348-2e24-66d2-38f019c9e9bb" [ 1401.888358] env[68194]: _type = "Task" [ 1401.888358] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1401.896362] env[68194]: DEBUG oslo_vmware.api [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527f170c-a348-2e24-66d2-38f019c9e9bb, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1402.398727] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1402.399046] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1402.399199] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1403.112834] env[68194]: DEBUG nova.compute.manager [req-551a45de-147d-4108-a35e-ff9a122265e7 req-46ac6b78-6540-4ba4-b734-72cb6421030c service nova] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Received event network-changed-a27faca8-661f-418d-9151-6335753967aa {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1403.112995] env[68194]: DEBUG nova.compute.manager [req-551a45de-147d-4108-a35e-ff9a122265e7 req-46ac6b78-6540-4ba4-b734-72cb6421030c service nova] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Refreshing instance network info cache due to event network-changed-a27faca8-661f-418d-9151-6335753967aa. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1403.113237] env[68194]: DEBUG oslo_concurrency.lockutils [req-551a45de-147d-4108-a35e-ff9a122265e7 req-46ac6b78-6540-4ba4-b734-72cb6421030c service nova] Acquiring lock "refresh_cache-7b430b72-05fa-49a6-8bbb-7c083cb96457" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1403.113390] env[68194]: DEBUG oslo_concurrency.lockutils [req-551a45de-147d-4108-a35e-ff9a122265e7 req-46ac6b78-6540-4ba4-b734-72cb6421030c service nova] Acquired lock "refresh_cache-7b430b72-05fa-49a6-8bbb-7c083cb96457" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1403.113551] env[68194]: DEBUG nova.network.neutron [req-551a45de-147d-4108-a35e-ff9a122265e7 req-46ac6b78-6540-4ba4-b734-72cb6421030c service nova] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Refreshing network info cache for port a27faca8-661f-418d-9151-6335753967aa {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1403.364997] env[68194]: DEBUG nova.network.neutron [req-551a45de-147d-4108-a35e-ff9a122265e7 req-46ac6b78-6540-4ba4-b734-72cb6421030c service nova] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Updated VIF entry in instance network info cache for port a27faca8-661f-418d-9151-6335753967aa. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1403.365381] env[68194]: DEBUG nova.network.neutron [req-551a45de-147d-4108-a35e-ff9a122265e7 req-46ac6b78-6540-4ba4-b734-72cb6421030c service nova] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Updating instance_info_cache with network_info: [{"id": "a27faca8-661f-418d-9151-6335753967aa", "address": "fa:16:3e:28:1e:7f", "network": {"id": "5fd38835-e1ec-487e-a224-216217ed60a6", "bridge": "br-int", "label": "tempest-ServerActionsTestJSON-62586306-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.5", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "ac8fb97945ad44a79d127c2ac56489a1", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8480e3f5-68bd-4c27-ae1f-7c994a8202b1", "external-id": "nsx-vlan-transportzone-628", "segmentation_id": 628, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa27faca8-66", "ovs_interfaceid": "a27faca8-661f-418d-9151-6335753967aa", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1403.374281] env[68194]: DEBUG oslo_concurrency.lockutils [req-551a45de-147d-4108-a35e-ff9a122265e7 req-46ac6b78-6540-4ba4-b734-72cb6421030c service nova] Releasing lock "refresh_cache-7b430b72-05fa-49a6-8bbb-7c083cb96457" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1413.197806] env[68194]: DEBUG oslo_concurrency.lockutils [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1416.416581] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1416.416897] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1418.417402] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1420.417294] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1420.417622] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1420.417740] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1421.416181] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1421.427285] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1421.427608] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1421.427772] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1421.427907] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1421.428981] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b367d11b-c6c2-43e8-8cca-db36d6dee311 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.437621] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d5f0775-a58b-4f7d-a797-eba80a9ccc7b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.451474] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-90cadf6f-aae6-4212-8157-7a71a8385e01 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.457572] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4272f71d-b7f9-4427-b2e4-7bbff34a458f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.485936] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180965MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1421.486118] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1421.486317] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1421.565735] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.565921] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.566058] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.566184] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.566307] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.566427] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.566546] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.566664] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.566781] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.566893] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1421.578215] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ada24904-c85b-4af9-be4c-afc8514b7307 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.592855] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fa0796d0-14e5-4bcc-9571-3193f4c1185e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.603255] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.613144] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 874d08ae-ce38-4a35-bd3f-5c40a2c9bf97 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.622868] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fa78516a-fe6f-4770-9def-ebe439e87adc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.632840] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 92ae0029-0d42-4655-9971-6dfbc07df15d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.645441] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.653245] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquiring lock "bf9766c7-1495-4edd-92bd-06a0d036855e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1421.657082] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1421.657320] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1421.657470] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1421.861165] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6dd6f2f-1d0f-48cf-9822-10763f60d893 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.868612] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4f47ec8-d5e0-4937-8f33-afe86f6ced90 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.897358] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bb1e337a-d3ba-468f-b0f7-709592d6c86c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.904351] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dd4c7992-62f3-41a6-91d2-8431863d1b0e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1421.917854] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1421.926848] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1421.940891] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1421.941066] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.455s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1422.941699] env[68194]: DEBUG oslo_service.periodic_task [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1423.412344] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1424.436306] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1425.416997] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1425.417254] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1425.417410] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1425.438054] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.438345] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.438345] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.438541] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.438625] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.438765] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.438890] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.439018] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.439142] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.439258] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1425.439378] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1427.663115] env[68194]: DEBUG oslo_concurrency.lockutils [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquiring lock "7b430b72-05fa-49a6-8bbb-7c083cb96457" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1441.013779] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "2b833505-f170-46ea-8d14-c449f88a7d4c" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1441.014459] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1445.601735] env[68194]: WARNING oslo_vmware.rw_handles [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1445.601735] env[68194]: ERROR oslo_vmware.rw_handles [ 1445.602467] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/e5266bd2-30af-49f8-b487-08f04e0703e8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1445.603926] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1445.604185] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Copying Virtual Disk [datastore1] vmware_temp/e5266bd2-30af-49f8-b487-08f04e0703e8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/e5266bd2-30af-49f8-b487-08f04e0703e8/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1445.604466] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-87583d14-754b-4e1a-8f4f-ec441c2d3529 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1445.613302] env[68194]: DEBUG oslo_vmware.api [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Waiting for the task: (returnval){ [ 1445.613302] env[68194]: value = "task-3466894" [ 1445.613302] env[68194]: _type = "Task" [ 1445.613302] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1445.621654] env[68194]: DEBUG oslo_vmware.api [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Task: {'id': task-3466894, 'name': CopyVirtualDisk_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.123721] env[68194]: DEBUG oslo_vmware.exceptions [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1446.124225] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1446.124595] env[68194]: ERROR nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1446.124595] env[68194]: Faults: ['InvalidArgument'] [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Traceback (most recent call last): [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] yield resources [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] self.driver.spawn(context, instance, image_meta, [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] self._fetch_image_if_missing(context, vi) [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] image_cache(vi, tmp_image_ds_loc) [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] vm_util.copy_virtual_disk( [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File 
"/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] session._wait_for_task(vmdk_copy_task) [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] return self.wait_for_task(task_ref) [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] return evt.wait() [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] result = hub.switch() [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] return self.greenlet.switch() [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] self.f(*self.args, **self.kw) [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] raise exceptions.translate_fault(task_info.error) [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Faults: ['InvalidArgument'] [ 1446.124595] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] [ 1446.125639] env[68194]: INFO nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Terminating instance [ 1446.126571] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1446.126784] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 
tempest-SecurityGroupsTestJSON-1230207051-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1446.127533] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4246a759-445d-4278-8c22-1cb8c71e6b59 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.129746] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1446.129945] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1446.130848] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2114843-b51c-44f5-aed1-af7cd50da324 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.138113] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1446.138334] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d53c10bf-1745-4fd0-9f40-9a970f400e7d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.140685] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1446.140880] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1446.141827] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-49756a2f-1743-4dda-93b5-b8c0ef31568c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.146583] env[68194]: DEBUG oslo_vmware.api [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Waiting for the task: (returnval){ [ 1446.146583] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52bed4b2-2f3e-f262-3513-61d8d182fe38" [ 1446.146583] env[68194]: _type = "Task" [ 1446.146583] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.154989] env[68194]: DEBUG oslo_vmware.api [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52bed4b2-2f3e-f262-3513-61d8d182fe38, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.211634] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1446.211889] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1446.212089] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Deleting the datastore file [datastore1] 20f4ed05-ee86-416b-8bf7-d446d33bab6f {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1446.212440] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-325d8388-765c-43b0-b065-c4ab5877d92e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.219204] env[68194]: DEBUG oslo_vmware.api [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Waiting for the task: (returnval){ [ 1446.219204] env[68194]: value = "task-3466896" [ 1446.219204] env[68194]: _type = "Task" [ 1446.219204] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1446.226719] env[68194]: DEBUG oslo_vmware.api [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Task: {'id': task-3466896, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1446.656594] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1446.656950] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Creating directory with path [datastore1] vmware_temp/7a2f9172-9e59-40b0-8732-a0c0bb2bed4c/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1446.657077] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-608461c1-2dc5-40d7-9c5f-437e8a088bc6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.668791] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Created directory with path [datastore1] vmware_temp/7a2f9172-9e59-40b0-8732-a0c0bb2bed4c/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1446.668980] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Fetch image to [datastore1] vmware_temp/7a2f9172-9e59-40b0-8732-a0c0bb2bed4c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1446.669171] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/7a2f9172-9e59-40b0-8732-a0c0bb2bed4c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1446.669925] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-262c86af-12b3-4fc5-9566-887517d0d842 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.676236] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-58150c97-7cf6-410d-8d5c-d46f3fd3b860 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.684975] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e743461-093e-4039-aa39-269af60c4b6f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.715015] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-fbd94759-af45-40a6-876d-e0eb6e894ced {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.722779] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-8387fea7-8a80-4598-aaec-fb9083c885e6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1446.728608] env[68194]: DEBUG oslo_vmware.api [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Task: {'id': task-3466896, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075265} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1446.728829] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1446.729035] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1446.729217] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1446.729391] env[68194]: INFO nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Took 0.60 seconds to destroy the instance on the hypervisor. 
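The DeleteDatastoreFile_Task records above follow the usual oslo.vmware pattern: the session issues the vSphere call, gets back a task reference, and wait_for_task() polls it (the "progress is 0%" and "completed successfully" records) until it reaches a terminal state. A minimal sketch of that pattern, assuming placeholder vCenter credentials and an illustrative datastore path rather than values from this deployment:

    from oslo_vmware import api

    # Placeholder host/credentials; api_retry_count=10 and
    # task_poll_interval=0.5 are passed positionally.
    session = api.VMwareAPISession('vc.example.test', 'administrator',
                                   'secret', 10, 0.5)

    vim = session.vim
    # FileManager.DeleteDatastoreFile_Task, as invoked in the log; the
    # datastore path is illustrative and the datacenter moref is omitted here.
    task = session.invoke_api(
        vim, 'DeleteDatastoreFile_Task',
        vim.service_content.fileManager,
        name='[datastore1] 20f4ed05-ee86-416b-8bf7-d446d33bab6f')
    # wait_for_task() drives the _poll_task loop seen in the log and raises a
    # translated fault (e.g. VimFaultException) if the task fails.
    session.wait_for_task(task)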
[ 1446.731589] env[68194]: DEBUG nova.compute.claims [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1446.731757] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1446.731968] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1446.744327] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1446.797820] env[68194]: DEBUG oslo_vmware.rw_handles [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7a2f9172-9e59-40b0-8732-a0c0bb2bed4c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1446.856592] env[68194]: DEBUG oslo_vmware.rw_handles [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1446.856773] env[68194]: DEBUG oslo_vmware.rw_handles [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/7a2f9172-9e59-40b0-8732-a0c0bb2bed4c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1447.033971] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93199a31-4f84-4900-bfe0-318357696471 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.041747] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e8f1a357-cbde-403d-b42b-9d817b56309f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.078111] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dafca220-8026-4f39-ae6b-5d4e00855fec {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.086108] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13e07ada-3651-45df-8f6e-f6a743168f2a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.099270] env[68194]: DEBUG nova.compute.provider_tree [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1447.109880] env[68194]: DEBUG nova.scheduler.client.report [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1447.125805] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.394s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1447.126262] env[68194]: ERROR nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1447.126262] env[68194]: Faults: ['InvalidArgument'] [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Traceback (most recent call last): [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1447.126262] env[68194]: ERROR 
nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] self.driver.spawn(context, instance, image_meta, [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] self._fetch_image_if_missing(context, vi) [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] image_cache(vi, tmp_image_ds_loc) [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] vm_util.copy_virtual_disk( [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] session._wait_for_task(vmdk_copy_task) [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] return self.wait_for_task(task_ref) [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] return evt.wait() [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] result = hub.switch() [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] return self.greenlet.switch() [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] self.f(*self.args, **self.kw) [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] raise exceptions.translate_fault(task_info.error) [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Faults: ['InvalidArgument'] [ 1447.126262] env[68194]: ERROR nova.compute.manager [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] [ 1447.127386] env[68194]: DEBUG nova.compute.utils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1447.128421] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Build of instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f was re-scheduled: A specified parameter was not correct: fileType [ 1447.128421] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1447.128824] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1447.128966] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1447.129172] env[68194]: DEBUG nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1447.129340] env[68194]: DEBUG nova.network.neutron [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1447.549711] env[68194]: DEBUG nova.network.neutron [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.561880] env[68194]: INFO nova.compute.manager [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Took 0.43 seconds to deallocate network for instance. [ 1447.653022] env[68194]: INFO nova.scheduler.client.report [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Deleted allocations for instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f [ 1447.673140] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8b909405-25a7-478d-a06d-6caa881bb3ca tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.032s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1447.674297] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 433.556s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1447.674524] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Acquiring lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1447.674740] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1447.674910] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1447.676926] env[68194]: INFO nova.compute.manager [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Terminating instance [ 1447.678649] env[68194]: DEBUG nova.compute.manager [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1447.678847] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1447.679343] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-a060be1b-946a-474a-b29d-1434233cdf85 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.688935] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-136830c2-4bf1-4ca6-a294-682c3a834db8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1447.719053] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 20f4ed05-ee86-416b-8bf7-d446d33bab6f could not be found. [ 1447.719184] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1447.719344] env[68194]: INFO nova.compute.manager [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1447.719703] env[68194]: DEBUG oslo.service.loopingcall [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1447.720110] env[68194]: DEBUG nova.compute.manager [None req-526474d6-d4b8-43ce-901d-e1604e91dc8f tempest-ServerRescueTestJSONUnderV235-1403302761 tempest-ServerRescueTestJSONUnderV235-1403302761-project-member] [instance: ada24904-c85b-4af9-be4c-afc8514b7307] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1447.722474] env[68194]: DEBUG nova.compute.manager [-] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1447.722576] env[68194]: DEBUG nova.network.neutron [-] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1447.745215] env[68194]: DEBUG nova.compute.manager [None req-526474d6-d4b8-43ce-901d-e1604e91dc8f tempest-ServerRescueTestJSONUnderV235-1403302761 tempest-ServerRescueTestJSONUnderV235-1403302761-project-member] [instance: ada24904-c85b-4af9-be4c-afc8514b7307] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1447.750629] env[68194]: DEBUG nova.network.neutron [-] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1447.757403] env[68194]: INFO nova.compute.manager [-] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] Took 0.03 seconds to deallocate network for instance. [ 1447.764769] env[68194]: DEBUG oslo_concurrency.lockutils [None req-526474d6-d4b8-43ce-901d-e1604e91dc8f tempest-ServerRescueTestJSONUnderV235-1403302761 tempest-ServerRescueTestJSONUnderV235-1403302761-project-member] Lock "ada24904-c85b-4af9-be4c-afc8514b7307" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 207.867s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1447.772795] env[68194]: DEBUG nova.compute.manager [None req-99b934d3-4269-45c1-bc95-69051e00f9b4 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: fa0796d0-14e5-4bcc-9571-3193f4c1185e] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1447.803597] env[68194]: DEBUG nova.compute.manager [None req-99b934d3-4269-45c1-bc95-69051e00f9b4 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: fa0796d0-14e5-4bcc-9571-3193f4c1185e] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1447.822573] env[68194]: DEBUG oslo_concurrency.lockutils [None req-99b934d3-4269-45c1-bc95-69051e00f9b4 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "fa0796d0-14e5-4bcc-9571-3193f4c1185e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 203.218s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1447.830591] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1447.854028] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2855d7ec-12fd-4dd7-871c-f4e99f9dfe62 tempest-ServersAdminTestJSON-1226418108 tempest-ServersAdminTestJSON-1226418108-project-member] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.180s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1447.854816] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 236.389s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1447.855010] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 20f4ed05-ee86-416b-8bf7-d446d33bab6f] During sync_power_state the instance has a pending task (deleting). Skip. 
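The inventory payload reported to Placement above fixes the schedulable capacity of this compute node: for each resource class the usable amount is roughly (total - reserved) * allocation_ratio, and every allocation must also respect min_unit, max_unit and step_size. A short sketch using the figures from the log (the helper function is illustrative, not Nova code):

    # Figures copied from the set_inventory_for_provider record above.
    inventory = {
        'VCPU': {'total': 48, 'reserved': 0, 'allocation_ratio': 4.0},
        'MEMORY_MB': {'total': 196590, 'reserved': 512, 'allocation_ratio': 1.0},
        'DISK_GB': {'total': 400, 'reserved': 0, 'allocation_ratio': 1.0},
    }

    def capacity(inv):
        # Effective capacity as Placement evaluates it when accepting claims.
        return (inv['total'] - inv['reserved']) * inv['allocation_ratio']

    for rc, inv in inventory.items():
        print(rc, capacity(inv))
    # VCPU 192.0, MEMORY_MB 196078.0, DISK_GB 400.0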
[ 1447.855195] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "20f4ed05-ee86-416b-8bf7-d446d33bab6f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1447.878744] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1447.878982] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1447.880483] env[68194]: INFO nova.compute.claims [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1448.105525] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-20ca67b2-e6c5-4046-8a0f-22a4f391d39b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.112952] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d31ea116-abaf-4f2c-bde0-cf7c8cf4e285 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.142472] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c3dcab05-8fa9-432f-a977-063f4a3e8f3f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.149999] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f01fd4c-f169-43a5-81d4-fd883be284d9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.164263] env[68194]: DEBUG nova.compute.provider_tree [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1448.172693] env[68194]: DEBUG nova.scheduler.client.report [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 
'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1448.185458] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.306s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1448.185971] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1448.222636] env[68194]: DEBUG nova.compute.utils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1448.223909] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1448.224095] env[68194]: DEBUG nova.network.neutron [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1448.236890] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1448.293425] env[68194]: DEBUG nova.policy [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '07679454431e47c4bbf8a7a2740e2baf', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a12cfaa715fe43e0989996a84262ed5c', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1448.309573] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1448.336183] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1448.336487] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1448.336678] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1448.336866] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1448.337023] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1448.337436] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1448.337436] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1448.337542] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1448.337789] env[68194]: DEBUG 
nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1448.337847] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1448.338073] env[68194]: DEBUG nova.virt.hardware [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1448.339033] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de1a5a8c-afc7-44a8-a44d-62cdddec008c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.347208] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-703874a3-7317-41a1-b910-0282d34062c1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1448.605030] env[68194]: DEBUG nova.network.neutron [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Successfully created port: 4c025104-1132-4b2c-b927-1337184ab756 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1449.160996] env[68194]: DEBUG nova.network.neutron [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Successfully updated port: 4c025104-1132-4b2c-b927-1337184ab756 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1449.176210] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquiring lock "refresh_cache-3da3b410-889a-42c5-9603-f92f689ab5b5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1449.176369] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquired lock "refresh_cache-3da3b410-889a-42c5-9603-f92f689ab5b5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1449.176526] env[68194]: DEBUG nova.network.neutron [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1449.214967] env[68194]: DEBUG nova.network.neutron [None 
req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1449.376214] env[68194]: DEBUG nova.network.neutron [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Updating instance_info_cache with network_info: [{"id": "4c025104-1132-4b2c-b927-1337184ab756", "address": "fa:16:3e:4c:82:f1", "network": {"id": "c68d29b1-b800-4eab-945b-ed2e62e0acf6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-779324002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a12cfaa715fe43e0989996a84262ed5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c025104-11", "ovs_interfaceid": "4c025104-1132-4b2c-b927-1337184ab756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.388523] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Releasing lock "refresh_cache-3da3b410-889a-42c5-9603-f92f689ab5b5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1449.388908] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Instance network_info: |[{"id": "4c025104-1132-4b2c-b927-1337184ab756", "address": "fa:16:3e:4c:82:f1", "network": {"id": "c68d29b1-b800-4eab-945b-ed2e62e0acf6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-779324002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a12cfaa715fe43e0989996a84262ed5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c025104-11", "ovs_interfaceid": 
"4c025104-1132-4b2c-b927-1337184ab756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1449.389273] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:4c:82:f1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47ca1ce6-8148-48d5-bcfe-89e39b73914e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '4c025104-1132-4b2c-b927-1337184ab756', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1449.396882] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Creating folder: Project (a12cfaa715fe43e0989996a84262ed5c). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1449.397451] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-a8a56e5a-b790-4cd6-a48b-773d4a150cd1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.409580] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Created folder: Project (a12cfaa715fe43e0989996a84262ed5c) in parent group-v692426. [ 1449.409770] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Creating folder: Instances. Parent ref: group-v692518. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1449.410034] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-68b5109c-6d39-4116-9763-1d3db203376f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.419319] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Created folder: Instances in parent group-v692518. [ 1449.419543] env[68194]: DEBUG oslo.service.loopingcall [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1449.419717] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1449.419931] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-70f00571-69ca-4c67-9f21-5d03ac6a3f4d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.439484] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1449.439484] env[68194]: value = "task-3466899" [ 1449.439484] env[68194]: _type = "Task" [ 1449.439484] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.447271] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466899, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1449.614674] env[68194]: DEBUG nova.compute.manager [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Received event network-vif-plugged-4c025104-1132-4b2c-b927-1337184ab756 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1449.614966] env[68194]: DEBUG oslo_concurrency.lockutils [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] Acquiring lock "3da3b410-889a-42c5-9603-f92f689ab5b5-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1449.615442] env[68194]: DEBUG oslo_concurrency.lockutils [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1449.615700] env[68194]: DEBUG oslo_concurrency.lockutils [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1449.615897] env[68194]: DEBUG nova.compute.manager [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] No waiting events found dispatching network-vif-plugged-4c025104-1132-4b2c-b927-1337184ab756 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1449.616477] env[68194]: WARNING nova.compute.manager [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Received unexpected event network-vif-plugged-4c025104-1132-4b2c-b927-1337184ab756 for instance with vm_state building and task_state spawning. 
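The "Acquiring lock ... / acquired ... waited / released ... held" triplets, including the per-instance "-events" lock just above, are emitted by oslo.concurrency's lock wrapper (the lockutils.py "inner" function cited in each record). A minimal sketch of that pattern; the lock name is taken from the log, but the decorated function body is illustrative and not Nova's:

    from oslo_concurrency import lockutils

    # synchronized() serializes callers on a named in-process lock and logs the
    # acquire/wait/hold timings at DEBUG, which is what produces these records.
    @lockutils.synchronized('3da3b410-889a-42c5-9603-f92f689ab5b5-events')
    def pop_instance_event():
        # event bookkeeping happens while the lock is held
        return None

    pop_instance_event()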
[ 1449.616704] env[68194]: DEBUG nova.compute.manager [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Received event network-changed-4c025104-1132-4b2c-b927-1337184ab756 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1449.616876] env[68194]: DEBUG nova.compute.manager [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Refreshing instance network info cache due to event network-changed-4c025104-1132-4b2c-b927-1337184ab756. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1449.617104] env[68194]: DEBUG oslo_concurrency.lockutils [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] Acquiring lock "refresh_cache-3da3b410-889a-42c5-9603-f92f689ab5b5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1449.617270] env[68194]: DEBUG oslo_concurrency.lockutils [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] Acquired lock "refresh_cache-3da3b410-889a-42c5-9603-f92f689ab5b5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1449.617444] env[68194]: DEBUG nova.network.neutron [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Refreshing network info cache for port 4c025104-1132-4b2c-b927-1337184ab756 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1449.865877] env[68194]: DEBUG nova.network.neutron [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Updated VIF entry in instance network info cache for port 4c025104-1132-4b2c-b927-1337184ab756. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1449.866230] env[68194]: DEBUG nova.network.neutron [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Updating instance_info_cache with network_info: [{"id": "4c025104-1132-4b2c-b927-1337184ab756", "address": "fa:16:3e:4c:82:f1", "network": {"id": "c68d29b1-b800-4eab-945b-ed2e62e0acf6", "bridge": "br-int", "label": "tempest-ServerActionsTestOtherB-779324002-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.8", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a12cfaa715fe43e0989996a84262ed5c", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47ca1ce6-8148-48d5-bcfe-89e39b73914e", "external-id": "nsx-vlan-transportzone-259", "segmentation_id": 259, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap4c025104-11", "ovs_interfaceid": "4c025104-1132-4b2c-b927-1337184ab756", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1449.875585] env[68194]: DEBUG oslo_concurrency.lockutils [req-ddae218b-eba1-496b-a13e-6671a2bbfc23 req-1699a28e-912b-4453-a9ef-bd9c5c1a6a5a service nova] Releasing lock "refresh_cache-3da3b410-889a-42c5-9603-f92f689ab5b5" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1449.949426] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466899, 'name': CreateVM_Task, 'duration_secs': 0.313488} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1449.949632] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1449.950402] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1449.950601] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1449.950929] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1449.951208] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7f519998-c558-4e34-8493-8f7a46ce4afd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1449.955573] env[68194]: DEBUG oslo_vmware.api [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Waiting for the task: (returnval){ [ 1449.955573] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52889de2-58c8-b3bb-47a2-d88fde9faa46" [ 1449.955573] env[68194]: _type = "Task" [ 1449.955573] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1449.962792] env[68194]: DEBUG oslo_vmware.api [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52889de2-58c8-b3bb-47a2-d88fde9faa46, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1450.465895] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1450.466183] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1450.466422] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1456.344807] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1456.345145] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1464.750315] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquiring lock "3da3b410-889a-42c5-9603-f92f689ab5b5" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1476.417669] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1476.417993] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1477.417613] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1477.417806] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1477.431437] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] There are 0 instances to clean {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1480.431046] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1480.431400] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1481.416595] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1482.417432] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1482.417708] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.424393] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1483.436096] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1483.436352] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1483.436528] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: 
held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1483.436688] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1483.437897] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b6feb2ea-6eb5-4aa8-baf3-770e0f61d656 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.448452] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303b8585-e00e-477d-a4ac-ddd17cc606da {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.462525] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9eda5aa4-2cb4-4c24-aa29-01db3e8168ab {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.468920] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98ff98d6-9168-46d3-b77f-1a579df01838 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.497625] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180972MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1483.497818] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1483.498096] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1483.672598] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.672788] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.672936] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.673076] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.673202] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.673325] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.673444] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.673565] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.673681] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.673796] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1483.686045] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance fa78516a-fe6f-4770-9def-ebe439e87adc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1483.697164] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 92ae0029-0d42-4655-9971-6dfbc07df15d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1483.707015] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1483.716664] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1483.726486] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1483.737667] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1483.737901] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1483.738065] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1483.754126] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing inventories for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1483.771451] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating ProviderTree inventory for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1483.771660] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating inventory in ProviderTree for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1483.783103] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing aggregate associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, aggregates: None {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1483.800738] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing trait associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1483.970776] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0dd56a49-d035-4119-9664-9581df7ca12a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1483.979829] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-c396de1b-8e76-4c29-8f63-ec11d803130e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.010998] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-60ff8b8a-d005-40fb-9de1-666dc3a8d9dc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.019128] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-006bcb3c-bf11-4da0-a679-ab00215cf980 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1484.032946] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1484.041984] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1484.056141] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1484.056336] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.558s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1485.044229] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1485.044603] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1485.417344] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1485.417713] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1485.417973] env[68194]: DEBUG nova.compute.manager [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1485.445483] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1485.450283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1489.416641] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1489.417015] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances with incomplete migration {{(pid=68194) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1490.173976] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7f14011d-8ba7-45f5-a611-3321fa27a1f8 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Acquiring lock "dc7215fa-bc03-464e-81f0-22636be16748" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1490.174221] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7f14011d-8ba7-45f5-a611-3321fa27a1f8 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Lock "dc7215fa-bc03-464e-81f0-22636be16748" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1495.618327] env[68194]: WARNING oslo_vmware.rw_handles [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1495.618327] env[68194]: ERROR oslo_vmware.rw_handles [ 1495.619208] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/7a2f9172-9e59-40b0-8732-a0c0bb2bed4c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) 
fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1495.620768] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1495.621039] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Copying Virtual Disk [datastore1] vmware_temp/7a2f9172-9e59-40b0-8732-a0c0bb2bed4c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/7a2f9172-9e59-40b0-8732-a0c0bb2bed4c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1495.621325] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-94646f7c-8cc2-481f-ac70-1a9bc0c530a5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1495.628998] env[68194]: DEBUG oslo_vmware.api [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Waiting for the task: (returnval){ [ 1495.628998] env[68194]: value = "task-3466900" [ 1495.628998] env[68194]: _type = "Task" [ 1495.628998] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1495.636632] env[68194]: DEBUG oslo_vmware.api [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Task: {'id': task-3466900, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1496.139954] env[68194]: DEBUG oslo_vmware.exceptions [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1496.140101] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1496.141189] env[68194]: ERROR nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1496.141189] env[68194]: Faults: ['InvalidArgument'] [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Traceback (most recent call last): [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] yield resources [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] self.driver.spawn(context, instance, image_meta, [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] self._fetch_image_if_missing(context, vi) [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] image_cache(vi, tmp_image_ds_loc) [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] vm_util.copy_virtual_disk( [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] session._wait_for_task(vmdk_copy_task) [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] return self.wait_for_task(task_ref) [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] return evt.wait() [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] result = hub.switch() [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] return self.greenlet.switch() [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] self.f(*self.args, **self.kw) [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] raise exceptions.translate_fault(task_info.error) [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Faults: ['InvalidArgument'] [ 1496.141189] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] [ 1496.141189] env[68194]: INFO nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Terminating instance [ 1496.142455] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1496.142666] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1496.143338] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 
tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1496.143532] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1496.143764] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-397884ce-3145-494e-b689-569c7bf0408d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.146082] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1ada3af1-d360-4f3a-ad34-638c1963f014 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.153213] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1496.153484] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0f4bf2c2-feff-416e-ad0e-efffe421c63a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.155674] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1496.155853] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1496.156832] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a8bd9c58-73d2-44de-862c-2176c9e4916b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.161735] env[68194]: DEBUG oslo_vmware.api [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for the task: (returnval){ [ 1496.161735] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]522c7c25-e0a9-daa6-0a83-cf3277157365" [ 1496.161735] env[68194]: _type = "Task" [ 1496.161735] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1496.176010] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1496.176255] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Creating directory with path [datastore1] vmware_temp/5acddf4f-b2b3-48ee-af7a-921630913117/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1496.176464] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5f6f09dc-7b02-4b6d-a980-c01d2dcd4bee {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.197009] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Created directory with path [datastore1] vmware_temp/5acddf4f-b2b3-48ee-af7a-921630913117/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1496.197244] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Fetch image to [datastore1] vmware_temp/5acddf4f-b2b3-48ee-af7a-921630913117/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1496.197420] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/5acddf4f-b2b3-48ee-af7a-921630913117/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1496.198210] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ba791d1-56a1-426f-ba51-0614f9516094 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.206042] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-576f5bf8-1bad-4c31-8eb9-3aafac198c12 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.215772] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7edb44f-1ef9-4738-a7db-3b6b14452faa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.245847] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-2677e603-2eeb-470e-a72c-cbf37b6b000d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.252534] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-3e02167a-8cf0-4632-8546-fd5271e85802 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.275712] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1496.327826] env[68194]: DEBUG oslo_vmware.rw_handles [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5acddf4f-b2b3-48ee-af7a-921630913117/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1496.387099] env[68194]: DEBUG oslo_vmware.rw_handles [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1496.387262] env[68194]: DEBUG oslo_vmware.rw_handles [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/5acddf4f-b2b3-48ee-af7a-921630913117/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1496.981987] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1496.982370] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1496.982614] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Deleting the datastore file [datastore1] ce0f1886-189f-4ab3-9ed6-376dce542f5f {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1496.982976] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-6d7a07eb-28f3-4784-a037-c02e9748820e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1496.990522] env[68194]: DEBUG oslo_vmware.api [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Waiting for the task: (returnval){ [ 1496.990522] env[68194]: value = "task-3466902" [ 1496.990522] env[68194]: _type = "Task" [ 1496.990522] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1497.002727] env[68194]: DEBUG oslo_vmware.api [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Task: {'id': task-3466902, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1497.500226] env[68194]: DEBUG oslo_vmware.api [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Task: {'id': task-3466902, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075217} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1497.500388] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1497.500570] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1497.500770] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1497.500955] env[68194]: INFO nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Took 1.36 seconds to destroy the instance on the hypervisor. [ 1497.503102] env[68194]: DEBUG nova.compute.claims [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1497.503260] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1497.503473] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1497.728126] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dfbd2ed-54cb-429a-847c-4f606c048c68 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.735880] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a93898cb-deb2-4725-9042-0fe69558d62c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.765758] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4062c9d-e881-47ab-a0ff-27102967434f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.773073] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6b3019f6-4d79-49b2-921a-3369449eab67 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1497.785947] env[68194]: DEBUG nova.compute.provider_tree [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1497.795081] env[68194]: DEBUG nova.scheduler.client.report [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1497.810346] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.307s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1497.810902] env[68194]: ERROR nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1497.810902] env[68194]: Faults: ['InvalidArgument'] [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Traceback (most recent call last): [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] self.driver.spawn(context, instance, image_meta, [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] self._fetch_image_if_missing(context, vi) [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 
1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] image_cache(vi, tmp_image_ds_loc) [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] vm_util.copy_virtual_disk( [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] session._wait_for_task(vmdk_copy_task) [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] return self.wait_for_task(task_ref) [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] return evt.wait() [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] result = hub.switch() [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] return self.greenlet.switch() [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] self.f(*self.args, **self.kw) [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] raise exceptions.translate_fault(task_info.error) [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Faults: ['InvalidArgument'] [ 1497.810902] env[68194]: ERROR nova.compute.manager [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] [ 1497.811755] env[68194]: DEBUG nova.compute.utils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] VimFaultException {{(pid=68194) notify_about_instance_usage 
/opt/stack/nova/nova/compute/utils.py:430}} [ 1497.813174] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Build of instance ce0f1886-189f-4ab3-9ed6-376dce542f5f was re-scheduled: A specified parameter was not correct: fileType [ 1497.813174] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1497.813497] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1497.813674] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1497.813972] env[68194]: DEBUG nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1497.814221] env[68194]: DEBUG nova.network.neutron [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1498.153512] env[68194]: DEBUG nova.network.neutron [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.163610] env[68194]: INFO nova.compute.manager [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Took 0.35 seconds to deallocate network for instance. 
[ 1498.267602] env[68194]: INFO nova.scheduler.client.report [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Deleted allocations for instance ce0f1886-189f-4ab3-9ed6-376dce542f5f [ 1498.289196] env[68194]: DEBUG oslo_concurrency.lockutils [None req-60583d42-8073-4339-978a-8cf9ce4bfc96 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.505s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1498.290317] env[68194]: DEBUG oslo_concurrency.lockutils [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 434.126s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1498.290537] env[68194]: DEBUG oslo_concurrency.lockutils [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Acquiring lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1498.290902] env[68194]: DEBUG oslo_concurrency.lockutils [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1498.291658] env[68194]: DEBUG oslo_concurrency.lockutils [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1498.293281] env[68194]: INFO nova.compute.manager [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Terminating instance [ 1498.295038] env[68194]: DEBUG nova.compute.manager [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1498.295236] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1498.296019] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ed31e7d3-fe7d-498f-a00d-11949ddc62f8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.304558] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-185b1738-10b4-47ca-ad5c-ce37b99c3bdc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.316052] env[68194]: DEBUG nova.compute.manager [None req-c357d513-6539-4b96-869c-0eccd3af3b4d tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] [instance: 874d08ae-ce38-4a35-bd3f-5c40a2c9bf97] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1498.336250] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance ce0f1886-189f-4ab3-9ed6-376dce542f5f could not be found. [ 1498.336461] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1498.336638] env[68194]: INFO nova.compute.manager [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1498.336882] env[68194]: DEBUG oslo.service.loopingcall [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1498.337127] env[68194]: DEBUG nova.compute.manager [-] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1498.337228] env[68194]: DEBUG nova.network.neutron [-] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1498.340963] env[68194]: DEBUG nova.compute.manager [None req-c357d513-6539-4b96-869c-0eccd3af3b4d tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] [instance: 874d08ae-ce38-4a35-bd3f-5c40a2c9bf97] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1498.359136] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c357d513-6539-4b96-869c-0eccd3af3b4d tempest-AttachInterfacesTestJSON-512612418 tempest-AttachInterfacesTestJSON-512612418-project-member] Lock "874d08ae-ce38-4a35-bd3f-5c40a2c9bf97" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 223.801s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1498.360888] env[68194]: DEBUG nova.network.neutron [-] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1498.368127] env[68194]: DEBUG nova.compute.manager [None req-8c1ed091-5e97-43b1-8522-3899ee592014 tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] [instance: fa78516a-fe6f-4770-9def-ebe439e87adc] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1498.370371] env[68194]: INFO nova.compute.manager [-] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] Took 0.03 seconds to deallocate network for instance. [ 1498.391388] env[68194]: DEBUG nova.compute.manager [None req-8c1ed091-5e97-43b1-8522-3899ee592014 tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] [instance: fa78516a-fe6f-4770-9def-ebe439e87adc] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1498.415132] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8c1ed091-5e97-43b1-8522-3899ee592014 tempest-AttachVolumeTestJSON-1211419677 tempest-AttachVolumeTestJSON-1211419677-project-member] Lock "fa78516a-fe6f-4770-9def-ebe439e87adc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 210.746s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1498.426641] env[68194]: DEBUG nova.compute.manager [None req-b0e1c300-81b3-4f34-b51c-2d199c8152f0 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] [instance: 92ae0029-0d42-4655-9971-6dfbc07df15d] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1498.451729] env[68194]: DEBUG nova.compute.manager [None req-b0e1c300-81b3-4f34-b51c-2d199c8152f0 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] [instance: 92ae0029-0d42-4655-9971-6dfbc07df15d] Instance disappeared before build. 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1498.462254] env[68194]: DEBUG oslo_concurrency.lockutils [None req-af2c78fd-0acc-42b5-b423-556f47926d86 tempest-SecurityGroupsTestJSON-1230207051 tempest-SecurityGroupsTestJSON-1230207051-project-member] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.171s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1498.462342] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 286.996s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1498.462544] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: ce0f1886-189f-4ab3-9ed6-376dce542f5f] During sync_power_state the instance has a pending task (deleting). Skip. [ 1498.462720] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "ce0f1886-189f-4ab3-9ed6-376dce542f5f" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1498.475887] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b0e1c300-81b3-4f34-b51c-2d199c8152f0 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Lock "92ae0029-0d42-4655-9971-6dfbc07df15d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.736s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1498.484319] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1498.532354] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1498.532597] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1498.534011] env[68194]: INFO nova.compute.claims [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1498.745367] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3535f67f-3ad6-4424-b419-2088b07bfae1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.752944] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e49b8e4d-35eb-4365-baf6-45733eff96c4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.782089] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d269137-139d-4ee2-8296-2cb30b2f8446 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.788370] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98447818-a0a9-4877-a54b-d04233796d73 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.801146] env[68194]: DEBUG nova.compute.provider_tree [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1498.809452] env[68194]: DEBUG nova.scheduler.client.report [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1498.823381] env[68194]: DEBUG 
oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.291s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1498.823850] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1498.854364] env[68194]: DEBUG nova.compute.utils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1498.855492] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1498.855665] env[68194]: DEBUG nova.network.neutron [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1498.863836] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1498.913480] env[68194]: DEBUG nova.policy [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '31d74bfb8537483e8adf50b60f8b635a', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'defbdc952aac495caf13c7cc9ead3a53', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1498.927340] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1498.953219] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1498.953470] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1498.953655] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1498.953918] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1498.954038] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1498.954193] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1498.954401] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1498.954562] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1498.954727] 
env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1498.954891] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1498.955081] env[68194]: DEBUG nova.virt.hardware [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1498.955931] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-76da70ac-fc75-4d5d-944b-041e044a6094 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1498.964121] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b72459d1-8050-445b-b37e-732be9483913 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1499.215496] env[68194]: DEBUG nova.network.neutron [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Successfully created port: 19212f0e-9d95-46c8-94ca-12440257d995 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1499.776702] env[68194]: DEBUG nova.network.neutron [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Successfully updated port: 19212f0e-9d95-46c8-94ca-12440257d995 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1499.792168] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "refresh_cache-95be4f59-e835-4389-93ae-9814e97f8ef4" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1499.792386] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquired lock "refresh_cache-95be4f59-e835-4389-93ae-9814e97f8ef4" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1499.792567] env[68194]: DEBUG nova.network.neutron [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1499.835075] env[68194]: DEBUG 
nova.network.neutron [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1499.993782] env[68194]: DEBUG nova.network.neutron [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Updating instance_info_cache with network_info: [{"id": "19212f0e-9d95-46c8-94ca-12440257d995", "address": "fa:16:3e:06:54:47", "network": {"id": "58e5fa41-0570-4409-8c1e-7c52fd2c7a7a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1202426316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "defbdc952aac495caf13c7cc9ead3a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19212f0e-9d", "ovs_interfaceid": "19212f0e-9d95-46c8-94ca-12440257d995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.008738] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Releasing lock "refresh_cache-95be4f59-e835-4389-93ae-9814e97f8ef4" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1500.009065] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Instance network_info: |[{"id": "19212f0e-9d95-46c8-94ca-12440257d995", "address": "fa:16:3e:06:54:47", "network": {"id": "58e5fa41-0570-4409-8c1e-7c52fd2c7a7a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1202426316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "defbdc952aac495caf13c7cc9ead3a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19212f0e-9d", 
"ovs_interfaceid": "19212f0e-9d95-46c8-94ca-12440257d995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1500.009468] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:06:54:47', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '19212f0e-9d95-46c8-94ca-12440257d995', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1500.016845] env[68194]: DEBUG oslo.service.loopingcall [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1500.017330] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1500.017554] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-caada82d-6713-42e3-a81a-2148d60dbdda {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.037321] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1500.037321] env[68194]: value = "task-3466903" [ 1500.037321] env[68194]: _type = "Task" [ 1500.037321] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.044622] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466903, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1500.212897] env[68194]: DEBUG nova.compute.manager [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Received event network-vif-plugged-19212f0e-9d95-46c8-94ca-12440257d995 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1500.213157] env[68194]: DEBUG oslo_concurrency.lockutils [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] Acquiring lock "95be4f59-e835-4389-93ae-9814e97f8ef4-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1500.213398] env[68194]: DEBUG oslo_concurrency.lockutils [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1500.213633] env[68194]: DEBUG oslo_concurrency.lockutils [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1500.213806] env[68194]: DEBUG nova.compute.manager [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] No waiting events found dispatching network-vif-plugged-19212f0e-9d95-46c8-94ca-12440257d995 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1500.214022] env[68194]: WARNING nova.compute.manager [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Received unexpected event network-vif-plugged-19212f0e-9d95-46c8-94ca-12440257d995 for instance with vm_state building and task_state spawning. [ 1500.214215] env[68194]: DEBUG nova.compute.manager [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Received event network-changed-19212f0e-9d95-46c8-94ca-12440257d995 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1500.214433] env[68194]: DEBUG nova.compute.manager [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Refreshing instance network info cache due to event network-changed-19212f0e-9d95-46c8-94ca-12440257d995. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1500.214660] env[68194]: DEBUG oslo_concurrency.lockutils [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] Acquiring lock "refresh_cache-95be4f59-e835-4389-93ae-9814e97f8ef4" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1500.214828] env[68194]: DEBUG oslo_concurrency.lockutils [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] Acquired lock "refresh_cache-95be4f59-e835-4389-93ae-9814e97f8ef4" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1500.214996] env[68194]: DEBUG nova.network.neutron [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Refreshing network info cache for port 19212f0e-9d95-46c8-94ca-12440257d995 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1500.456081] env[68194]: DEBUG nova.network.neutron [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Updated VIF entry in instance network info cache for port 19212f0e-9d95-46c8-94ca-12440257d995. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1500.456449] env[68194]: DEBUG nova.network.neutron [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Updating instance_info_cache with network_info: [{"id": "19212f0e-9d95-46c8-94ca-12440257d995", "address": "fa:16:3e:06:54:47", "network": {"id": "58e5fa41-0570-4409-8c1e-7c52fd2c7a7a", "bridge": "br-int", "label": "tempest-VolumesAdminNegativeTest-1202426316-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "defbdc952aac495caf13c7cc9ead3a53", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "8b1a0b17-d008-4a8b-be2a-796ff1a9a2d1", "external-id": "nsx-vlan-transportzone-880", "segmentation_id": 880, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap19212f0e-9d", "ovs_interfaceid": "19212f0e-9d95-46c8-94ca-12440257d995", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1500.466678] env[68194]: DEBUG oslo_concurrency.lockutils [req-1fad02f8-773e-4f95-975f-d670f500bf35 req-c5017da3-aba4-48a5-b594-b8e23fdf44f9 service nova] Releasing lock "refresh_cache-95be4f59-e835-4389-93ae-9814e97f8ef4" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1500.547893] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466903, 'name': CreateVM_Task, 'duration_secs': 0.305463} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1500.548107] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1500.548779] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1500.548955] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1500.549315] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1500.549574] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-067124ba-e1b6-44f7-b250-22b115e96978 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1500.554449] env[68194]: DEBUG oslo_vmware.api [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for the task: (returnval){ [ 1500.554449] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52f020c0-acff-7f2e-8a3d-ece73a3e8798" [ 1500.554449] env[68194]: _type = "Task" [ 1500.554449] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1500.562244] env[68194]: DEBUG oslo_vmware.api [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52f020c0-acff-7f2e-8a3d-ece73a3e8798, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1501.065670] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1501.065947] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1501.066181] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1504.658721] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "95be4f59-e835-4389-93ae-9814e97f8ef4" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1536.429443] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1536.429975] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1541.417529] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1542.417336] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1543.416660] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1544.416474] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1544.691775] env[68194]: WARNING oslo_vmware.rw_handles [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1544.691775] env[68194]: ERROR oslo_vmware.rw_handles [ 1544.692508] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/5acddf4f-b2b3-48ee-af7a-921630913117/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1544.694898] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 
47cc57d7-40db-4a19-a983-f4e9ea9e8984] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1544.695265] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Copying Virtual Disk [datastore1] vmware_temp/5acddf4f-b2b3-48ee-af7a-921630913117/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/5acddf4f-b2b3-48ee-af7a-921630913117/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1544.695662] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ade4f492-a199-446a-bb89-e9bdf205ec6e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1544.704694] env[68194]: DEBUG oslo_vmware.api [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for the task: (returnval){ [ 1544.704694] env[68194]: value = "task-3466904" [ 1544.704694] env[68194]: _type = "Task" [ 1544.704694] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1544.715429] env[68194]: DEBUG oslo_vmware.api [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Task: {'id': task-3466904, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.215370] env[68194]: DEBUG oslo_vmware.exceptions [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1545.215681] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1545.216267] env[68194]: ERROR nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1545.216267] env[68194]: Faults: ['InvalidArgument'] [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Traceback (most recent call last): [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] yield resources [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] self.driver.spawn(context, instance, image_meta, [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] self._fetch_image_if_missing(context, vi) [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] image_cache(vi, tmp_image_ds_loc) [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] vm_util.copy_virtual_disk( [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] session._wait_for_task(vmdk_copy_task) [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] return self.wait_for_task(task_ref) [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] return evt.wait() [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] result = hub.switch() [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] return self.greenlet.switch() [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] self.f(*self.args, **self.kw) [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] raise exceptions.translate_fault(task_info.error) [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Faults: ['InvalidArgument'] [ 1545.216267] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] [ 1545.217299] env[68194]: INFO nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Terminating instance [ 1545.218892] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1545.219128] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1545.219788] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 
tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1545.219984] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1545.220238] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-013afc11-fb08-4299-aabe-77baf124da80 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.222865] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8dbf2aa6-1c31-4906-9ece-221ac2d0c648 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.229382] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1545.229607] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-450f6802-eab3-4fd0-9c40-06202343fa6a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.231924] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1545.232167] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1545.233155] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-b80cc798-e228-4e49-b60c-a190bbb600dd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.237688] env[68194]: DEBUG oslo_vmware.api [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Waiting for the task: (returnval){ [ 1545.237688] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]529de3cf-d313-07b4-6d66-d229639d4054" [ 1545.237688] env[68194]: _type = "Task" [ 1545.237688] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.245471] env[68194]: DEBUG oslo_vmware.api [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]529de3cf-d313-07b4-6d66-d229639d4054, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.300807] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1545.301049] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1545.301212] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Deleting the datastore file [datastore1] 47cc57d7-40db-4a19-a983-f4e9ea9e8984 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1545.301495] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-97ba598f-6427-40de-a09a-ca9f6d4e2878 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.307751] env[68194]: DEBUG oslo_vmware.api [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for the task: (returnval){ [ 1545.307751] env[68194]: value = "task-3466906" [ 1545.307751] env[68194]: _type = "Task" [ 1545.307751] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1545.315429] env[68194]: DEBUG oslo_vmware.api [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Task: {'id': task-3466906, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1545.411864] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1545.415537] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1545.415697] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1545.415829] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1545.438872] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.439049] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.439188] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.439316] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.439572] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.439786] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.439913] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.440052] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.440181] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.440302] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1545.440428] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1545.440969] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1545.441180] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1545.455531] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1545.455531] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1545.455531] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1545.455691] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1545.456739] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c999f95e-98a5-4a96-bfb4-f4ee9dbaf056 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.465585] env[68194]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f0778c0-d815-4f86-93ea-af312180428d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.479098] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95c953f6-6524-4533-9ef9-b5e5debaa2b1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.485268] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-44d3df43-977a-4ae0-9c02-8b729c788ec0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.514565] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180963MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1545.514724] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1545.514920] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1545.599303] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.599577] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.599723] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.599851] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.599974] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.600119] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.600242] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.600371] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.600476] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.600591] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1545.611062] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1545.620892] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1545.631181] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1545.641331] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance dc7215fa-bc03-464e-81f0-22636be16748 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1545.641614] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1545.641757] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1545.748776] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1545.749039] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Creating directory with path [datastore1] vmware_temp/de31cf65-a670-4395-a78a-5652e95389bc/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1545.749447] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-632b87d6-41aa-45d1-85d5-9a7ba7b85b05 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.760066] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Created directory with path [datastore1] vmware_temp/de31cf65-a670-4395-a78a-5652e95389bc/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1545.760264] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Fetch image to [datastore1] 
vmware_temp/de31cf65-a670-4395-a78a-5652e95389bc/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1545.760435] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/de31cf65-a670-4395-a78a-5652e95389bc/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1545.761157] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-96720dd4-b1cb-4af0-baf5-380e1c25ec2a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.769394] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-24489992-c11f-4e93-a237-31e8379812f6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.778440] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9691a7b2-89fb-4418-8142-78f1167b9814 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.815398] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a527e0e3-da6c-4545-9807-c6b8eee41247 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.822595] env[68194]: DEBUG oslo_vmware.api [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Task: {'id': task-3466906, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.064882} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1545.824031] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1545.824232] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1545.824408] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1545.824581] env[68194]: INFO nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1545.826992] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-203f727f-5f76-4299-9f28-70d171229668 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.829068] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86aa33eb-00b7-4232-b378-38ef76afc0eb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.831575] env[68194]: DEBUG nova.compute.claims [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1545.831719] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1545.835979] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a296ef5-6813-4416-ab45-fa73b843adc6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.865097] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f8527239-4ce2-483f-bbee-4d231da9d057 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.867601] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 
7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1545.873728] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5f7b575-557e-4cd5-a561-4039cd0b25e0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1545.886330] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1545.894140] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1545.906869] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1545.907065] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.392s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1545.907335] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.076s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1545.924794] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de31cf65-a670-4395-a78a-5652e95389bc/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1545.985307] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Completed reading data from the image iterator. 
{{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1545.985489] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/de31cf65-a670-4395-a78a-5652e95389bc/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1546.145074] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57954eeb-1090-4fbd-8049-10ef3f2b6bb8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.152279] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1a5578c8-f36b-430a-8c5b-42ca4cf61ee1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.181821] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5135fa3c-9ad8-49d2-8006-b06bd6062ff2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.188414] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18e7480f-dd84-45d3-b5c1-7565f2a2085f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.201322] env[68194]: DEBUG nova.compute.provider_tree [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1546.209865] env[68194]: DEBUG nova.scheduler.client.report [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1546.228390] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.321s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1546.228926] env[68194]: ERROR nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 
47cc57d7-40db-4a19-a983-f4e9ea9e8984] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1546.228926] env[68194]: Faults: ['InvalidArgument'] [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Traceback (most recent call last): [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] self.driver.spawn(context, instance, image_meta, [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] self._fetch_image_if_missing(context, vi) [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] image_cache(vi, tmp_image_ds_loc) [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] vm_util.copy_virtual_disk( [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] session._wait_for_task(vmdk_copy_task) [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] return self.wait_for_task(task_ref) [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] return evt.wait() [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] result = hub.switch() [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1546.228926] env[68194]: ERROR 
nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] return self.greenlet.switch() [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] self.f(*self.args, **self.kw) [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] raise exceptions.translate_fault(task_info.error) [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Faults: ['InvalidArgument'] [ 1546.228926] env[68194]: ERROR nova.compute.manager [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] [ 1546.229792] env[68194]: DEBUG nova.compute.utils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1546.231115] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Build of instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 was re-scheduled: A specified parameter was not correct: fileType [ 1546.231115] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1546.231475] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1546.231657] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1546.231831] env[68194]: DEBUG nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1546.232049] env[68194]: DEBUG nova.network.neutron [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1546.542981] env[68194]: DEBUG nova.network.neutron [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.556020] env[68194]: INFO nova.compute.manager [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Took 0.32 seconds to deallocate network for instance. [ 1546.664683] env[68194]: INFO nova.scheduler.client.report [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Deleted allocations for instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 [ 1546.694118] env[68194]: DEBUG oslo_concurrency.lockutils [None req-20191b3e-6925-4f9b-8d49-6d74dfc63bf0 tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 631.317s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1546.695377] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.156s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1546.695601] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1546.695822] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1546.695999] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1546.698115] env[68194]: INFO nova.compute.manager [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Terminating instance [ 1546.699910] env[68194]: DEBUG nova.compute.manager [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1546.700141] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1546.700608] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-0c512311-9d21-44e7-847e-55e38e9a9f46 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.711555] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-594c7fff-9584-4353-a108-031b10501042 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.722963] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1546.745160] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 47cc57d7-40db-4a19-a983-f4e9ea9e8984 could not be found. 
[ 1546.745320] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1546.745494] env[68194]: INFO nova.compute.manager [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1546.745734] env[68194]: DEBUG oslo.service.loopingcall [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1546.745959] env[68194]: DEBUG nova.compute.manager [-] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1546.746063] env[68194]: DEBUG nova.network.neutron [-] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1546.774698] env[68194]: DEBUG nova.network.neutron [-] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1546.778689] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1546.778916] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1546.780351] env[68194]: INFO nova.compute.claims [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1546.787522] env[68194]: INFO nova.compute.manager [-] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] Took 0.04 seconds to deallocate network for instance. 
[ 1546.873662] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d2efa300-12f5-4a46-b9f9-a965f0c6645c tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1546.874520] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 335.408s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1546.874712] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 47cc57d7-40db-4a19-a983-f4e9ea9e8984] During sync_power_state the instance has a pending task (deleting). Skip. [ 1546.874962] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "47cc57d7-40db-4a19-a983-f4e9ea9e8984" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1546.972860] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-02b983df-e39b-413b-b642-0fe5be5ac40c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1546.980456] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2274ecc5-a182-4acf-a161-b3a1c0943440 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.009180] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f5a3fde5-d8b6-4a00-8c12-f8caa6738e31 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.016791] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbd210d1-22b6-4885-8646-b675691bf377 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.032239] env[68194]: DEBUG nova.compute.provider_tree [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1547.042052] env[68194]: DEBUG nova.scheduler.client.report [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 
'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1547.054956] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.276s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1547.055435] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1547.094636] env[68194]: DEBUG nova.compute.utils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1547.095852] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Not allocating networking since 'none' was specified. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1952}} [ 1547.105751] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1547.165917] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1547.190520] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1547.190763] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1547.190919] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1547.191116] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1547.191266] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1547.191413] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1547.191744] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1547.191936] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1547.192130] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 
tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1547.192300] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1547.192478] env[68194]: DEBUG nova.virt.hardware [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1547.193383] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57bb1d36-c73c-4c5f-83b5-2c6f1e0c5d70 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.200975] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0ff7d010-4efd-425a-9e4d-c77b24086f1c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.214315] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Instance VIF info [] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1547.219580] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Creating folder: Project (5060c6c0914b4453802300993e03a720). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1547.219817] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1c9ff896-8145-4c0a-b240-c2c396ada798 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.229863] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Created folder: Project (5060c6c0914b4453802300993e03a720) in parent group-v692426. [ 1547.230050] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Creating folder: Instances. Parent ref: group-v692522. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1547.230261] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9d1d0c58-73a7-4be8-ba8e-763afc447658 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.238478] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Created folder: Instances in parent group-v692522. 
[ 1547.238702] env[68194]: DEBUG oslo.service.loopingcall [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1547.238875] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1547.239076] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-d79ad97b-0afd-4ee9-8e18-941f22261c9f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.254775] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1547.254775] env[68194]: value = "task-3466909" [ 1547.254775] env[68194]: _type = "Task" [ 1547.254775] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.261797] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466909, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.764931] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466909, 'name': CreateVM_Task, 'duration_secs': 0.232202} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1547.765897] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1547.765897] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1547.765897] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1547.766264] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1547.766532] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8b779be6-438e-48bf-94da-cc5cad677dbc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1547.770861] env[68194]: DEBUG oslo_vmware.api [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Waiting for the task: (returnval){ 
[ 1547.770861] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52d017ab-5568-e285-1316-18279ba94bb6" [ 1547.770861] env[68194]: _type = "Task" [ 1547.770861] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1547.777923] env[68194]: DEBUG oslo_vmware.api [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52d017ab-5568-e285-1316-18279ba94bb6, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1547.905209] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1548.280048] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1548.280267] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1548.280482] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1548.793701] env[68194]: DEBUG oslo_concurrency.lockutils [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "73abf0ba-016c-4536-afd3-f6c6960045fc" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1593.560537] env[68194]: WARNING oslo_vmware.rw_handles [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles 
response.begin() [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1593.560537] env[68194]: ERROR oslo_vmware.rw_handles [ 1593.561310] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/de31cf65-a670-4395-a78a-5652e95389bc/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1593.562849] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1593.563123] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Copying Virtual Disk [datastore1] vmware_temp/de31cf65-a670-4395-a78a-5652e95389bc/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/de31cf65-a670-4395-a78a-5652e95389bc/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1593.563492] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-ecc7a60a-cd89-4953-b084-17317dbbd78d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1593.571864] env[68194]: DEBUG oslo_vmware.api [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Waiting for the task: (returnval){ [ 1593.571864] env[68194]: value = "task-3466910" [ 1593.571864] env[68194]: _type = "Task" [ 1593.571864] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1593.579705] env[68194]: DEBUG oslo_vmware.api [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Task: {'id': task-3466910, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.082541] env[68194]: DEBUG oslo_vmware.exceptions [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1594.082799] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1594.083386] env[68194]: ERROR nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1594.083386] env[68194]: Faults: ['InvalidArgument'] [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Traceback (most recent call last): [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] yield resources [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self.driver.spawn(context, instance, image_meta, [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self._fetch_image_if_missing(context, vi) [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] image_cache(vi, tmp_image_ds_loc) [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] vm_util.copy_virtual_disk( [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] session._wait_for_task(vmdk_copy_task) [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return self.wait_for_task(task_ref) [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return evt.wait() [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] result = hub.switch() [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return self.greenlet.switch() [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self.f(*self.args, **self.kw) [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] raise exceptions.translate_fault(task_info.error) [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Faults: ['InvalidArgument'] [ 1594.083386] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] [ 1594.084261] env[68194]: INFO nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Terminating instance [ 1594.085327] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1594.085516] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.085760] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-d5cbec6e-cca8-43c0-9212-97a1d62c7839 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.088042] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "refresh_cache-7ed8ac34-04a2-49fe-9429-f636ff6fff8a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1594.088211] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquired lock "refresh_cache-7ed8ac34-04a2-49fe-9429-f636ff6fff8a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1594.088380] env[68194]: DEBUG nova.network.neutron [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1594.095879] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.096066] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1594.096809] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-9bbae4d6-5f9c-4f50-9632-648f8c1974a4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.104244] env[68194]: DEBUG oslo_vmware.api [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Waiting for the task: (returnval){ [ 1594.104244] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5272aef7-7b2e-4d64-4519-61f873fdaeec" [ 1594.104244] env[68194]: _type = "Task" [ 1594.104244] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.111771] env[68194]: DEBUG oslo_vmware.api [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5272aef7-7b2e-4d64-4519-61f873fdaeec, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.149679] env[68194]: DEBUG nova.network.neutron [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1594.209237] env[68194]: DEBUG nova.network.neutron [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1594.218027] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Releasing lock "refresh_cache-7ed8ac34-04a2-49fe-9429-f636ff6fff8a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1594.218427] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1594.218626] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1594.219730] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a8c4934c-7af6-497b-b355-ac2bc1d90215 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.228156] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1594.228392] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eed547cc-4197-4420-b7b0-0f332ccf7d87 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.261017] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1594.261253] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1594.261443] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Deleting the datastore file [datastore1] 7ed8ac34-04a2-49fe-9429-f636ff6fff8a {{(pid=68194) file_delete 
/opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1594.261702] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-2fa5afa9-c477-489c-adb5-bec718f2776a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.268150] env[68194]: DEBUG oslo_vmware.api [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Waiting for the task: (returnval){ [ 1594.268150] env[68194]: value = "task-3466912" [ 1594.268150] env[68194]: _type = "Task" [ 1594.268150] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1594.275290] env[68194]: DEBUG oslo_vmware.api [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Task: {'id': task-3466912, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1594.614747] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1594.615037] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Creating directory with path [datastore1] vmware_temp/b8578300-e289-42f7-bc8b-b2c47cee8ba4/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1594.615258] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-f9f993e7-28e9-4f11-a3be-1173d5c344c9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.626071] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Created directory with path [datastore1] vmware_temp/b8578300-e289-42f7-bc8b-b2c47cee8ba4/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1594.626303] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Fetch image to [datastore1] vmware_temp/b8578300-e289-42f7-bc8b-b2c47cee8ba4/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1594.626523] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/b8578300-e289-42f7-bc8b-b2c47cee8ba4/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on 
the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1594.627278] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5a5e996-9216-4fa2-8c44-bf5ec1a1bfd6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.633898] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54843bb7-3c40-4f3c-a096-1743b34ee1a5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.642997] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-abff3aec-f062-4641-be28-f095b37d0a18 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.674013] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2606801f-2ebf-475c-8273-db8a0368d0ef {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.679175] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-67f1e547-134b-4f20-83af-19fac4c96cf9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1594.698024] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1594.749093] env[68194]: DEBUG oslo_vmware.rw_handles [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b8578300-e289-42f7-bc8b-b2c47cee8ba4/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1594.809061] env[68194]: DEBUG oslo_vmware.rw_handles [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1594.809262] env[68194]: DEBUG oslo_vmware.rw_handles [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/b8578300-e289-42f7-bc8b-b2c47cee8ba4/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1594.813502] env[68194]: DEBUG oslo_vmware.api [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Task: {'id': task-3466912, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.045425} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1594.813736] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1594.813916] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1594.814099] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1594.814276] env[68194]: INFO nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Took 0.60 seconds to destroy the instance on the hypervisor. [ 1594.814506] env[68194]: DEBUG oslo.service.loopingcall [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1594.814701] env[68194]: DEBUG nova.compute.manager [-] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1594.816772] env[68194]: DEBUG nova.compute.claims [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1594.816946] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1594.817181] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1595.015749] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39297a83-6779-4449-ab81-af26985647d7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.023806] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fcad5c3e-9a24-4419-811e-c066d825d4e8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.054339] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaaf7419-2a17-4709-82ad-4f88f686fda1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.061283] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ac89fbbc-02e4-47e6-bf31-e7e3b61d4cb0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.074150] env[68194]: DEBUG nova.compute.provider_tree [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.082470] env[68194]: DEBUG nova.scheduler.client.report [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1595.096440] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.279s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1595.096971] env[68194]: ERROR nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1595.096971] env[68194]: Faults: ['InvalidArgument'] [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Traceback (most recent call last): [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self.driver.spawn(context, instance, image_meta, [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self._fetch_image_if_missing(context, vi) [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] image_cache(vi, tmp_image_ds_loc) [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] vm_util.copy_virtual_disk( [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] session._wait_for_task(vmdk_copy_task) [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return self.wait_for_task(task_ref) [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] 
return evt.wait() [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] result = hub.switch() [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return self.greenlet.switch() [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self.f(*self.args, **self.kw) [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] raise exceptions.translate_fault(task_info.error) [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Faults: ['InvalidArgument'] [ 1595.096971] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] [ 1595.097820] env[68194]: DEBUG nova.compute.utils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1595.098991] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Build of instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a was re-scheduled: A specified parameter was not correct: fileType [ 1595.098991] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1595.099379] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1595.099617] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "refresh_cache-7ed8ac34-04a2-49fe-9429-f636ff6fff8a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1595.099766] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 
tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquired lock "refresh_cache-7ed8ac34-04a2-49fe-9429-f636ff6fff8a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1595.099939] env[68194]: DEBUG nova.network.neutron [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1595.126411] env[68194]: DEBUG nova.network.neutron [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1595.180084] env[68194]: DEBUG nova.network.neutron [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.188619] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Releasing lock "refresh_cache-7ed8ac34-04a2-49fe-9429-f636ff6fff8a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1595.188839] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1595.189030] env[68194]: DEBUG nova.compute.manager [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Skipping network deallocation for instance since networking was not requested. 
{{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1595.351866] env[68194]: INFO nova.scheduler.client.report [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Deleted allocations for instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a [ 1595.372815] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e661538e-8707-4e5e-9330-9182435eeda0 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 635.399s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1595.374046] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 439.971s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1595.374191] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1595.374396] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1595.374569] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1595.376515] env[68194]: INFO nova.compute.manager [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Terminating instance [ 1595.378831] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Acquiring lock "refresh_cache-7ed8ac34-04a2-49fe-9429-f636ff6fff8a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1595.378917] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] 
Acquired lock "refresh_cache-7ed8ac34-04a2-49fe-9429-f636ff6fff8a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1595.379089] env[68194]: DEBUG nova.network.neutron [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1595.385768] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1595.406893] env[68194]: DEBUG nova.network.neutron [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1595.441802] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1595.442060] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1595.443599] env[68194]: INFO nova.compute.claims [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1595.478784] env[68194]: DEBUG nova.network.neutron [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1595.489578] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Releasing lock "refresh_cache-7ed8ac34-04a2-49fe-9429-f636ff6fff8a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1595.489578] env[68194]: DEBUG nova.compute.manager [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1595.489578] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1595.489803] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-ce78fa8c-7287-4c91-8328-1402131b42b2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.502018] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-024298ec-4ab8-4923-a8a9-e6042e748c33 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.534550] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7ed8ac34-04a2-49fe-9429-f636ff6fff8a could not be found. [ 1595.534550] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1595.534550] env[68194]: INFO nova.compute.manager [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1595.534752] env[68194]: DEBUG oslo.service.loopingcall [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1595.538785] env[68194]: DEBUG nova.compute.manager [-] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1595.538785] env[68194]: DEBUG nova.network.neutron [-] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1595.655416] env[68194]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1595.655688] env[68194]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-98fb39b6-073d-4e7c-8674-4d2165b5ecc6'] [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1595.656218] env[68194]: ERROR oslo.service.loopingcall [ 1595.657712] env[68194]: ERROR nova.compute.manager [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1595.662691] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3b5f08a8-8258-4433-88d3-f1aec3468f8c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.670138] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-213a17de-95aa-480c-b913-f73a4a3e5fac {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.699582] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-37ccf9d7-4636-4d50-9432-3c5c63af47c7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.703140] env[68194]: ERROR nova.compute.manager [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Traceback (most recent call last): [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] ret = obj(*args, **kwargs) [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] exception_handler_v20(status_code, error_body) [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] raise client_exc(message=error_message, [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Neutron server returns request_ids: ['req-98fb39b6-073d-4e7c-8674-4d2165b5ecc6'] [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] During handling of the above exception, another exception occurred: [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Traceback (most recent call last): [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self._delete_instance(context, instance, bdms) [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self._shutdown_instance(context, instance, bdms) [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self._try_deallocate_network(context, instance, requested_networks) [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] with excutils.save_and_reraise_exception(): [ 1595.703140] env[68194]: ERROR 
nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self.force_reraise() [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] raise self.value [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] _deallocate_network_with_retries() [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return evt.wait() [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] result = hub.switch() [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return self.greenlet.switch() [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] result = func(*self.args, **self.kw) [ 1595.703140] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] result = f(*args, **kwargs) [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self._deallocate_network( [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self.network_api.deallocate_for_instance( [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 
7ed8ac34-04a2-49fe-9429-f636ff6fff8a] data = neutron.list_ports(**search_opts) [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] ret = obj(*args, **kwargs) [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return self.list('ports', self.ports_path, retrieve_all, [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] ret = obj(*args, **kwargs) [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] for r in self._pagination(collection, path, **params): [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] res = self.get(path, params=params) [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] ret = obj(*args, **kwargs) [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return self.retry_request("GET", action, body=body, [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] ret = obj(*args, **kwargs) [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] return self.do_request(method, action, body=body, [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] ret = obj(*args, **kwargs) [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] self._handle_fault_response(status_code, replybody, resp) [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1595.704275] env[68194]: ERROR nova.compute.manager [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] [ 1595.710293] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-517251cf-82c8-4b2d-8d19-f4fcfa220d74 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.724223] env[68194]: DEBUG nova.compute.provider_tree [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1595.731729] env[68194]: DEBUG nova.scheduler.client.report [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1595.735241] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.361s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1595.736313] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 384.270s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1595.736500] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1595.736674] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "7ed8ac34-04a2-49fe-9429-f636ff6fff8a" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1595.745302] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.303s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1595.745728] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1595.776362] env[68194]: DEBUG nova.compute.utils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1595.777871] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1595.778078] env[68194]: DEBUG nova.network.neutron [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1595.780341] env[68194]: INFO nova.compute.manager [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] [instance: 7ed8ac34-04a2-49fe-9429-f636ff6fff8a] Successfully reverted task state from None on failure for instance. [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server [None req-ccfb1b46-f926-4b62-b964-d159fd8f0c66 tempest-ServersAaction247Test-1038090335 tempest-ServersAaction247Test-1038090335-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-98fb39b6-073d-4e7c-8674-4d2165b5ecc6'] [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server with 
excutils.save_and_reraise_exception(): [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1595.783713] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.785782] env[68194]: ERROR 
oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1595.785782] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1595.787156] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1595.787156] env[68194]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1595.787156] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1595.787156] env[68194]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1595.787156] env[68194]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1595.787156] env[68194]: ERROR oslo_messaging.rpc.server [ 1595.787156] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Start building block device mappings for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1595.839523] env[68194]: DEBUG nova.policy [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '628bc4df46494159a5e5a4b71770f64f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7fe2744a0c14564ae1dea9f2653bc4a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1595.846586] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1595.872936] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1595.872936] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1595.873138] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1595.873173] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1595.873330] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1595.873545] 
env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1595.873835] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1595.873947] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1595.874210] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1595.874383] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1595.874555] env[68194]: DEBUG nova.virt.hardware [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1595.875418] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd3ba238-2aa3-40ba-8502-2caa78384db2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1595.883940] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ca390e3-f6db-4d79-ae11-2300e4fe78e5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1596.148110] env[68194]: DEBUG nova.network.neutron [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Successfully created port: bfea196b-820a-4386-8493-bbcc78eea460 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1596.415743] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1596.415970] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, 
skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1596.951820] env[68194]: DEBUG nova.network.neutron [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Successfully updated port: bfea196b-820a-4386-8493-bbcc78eea460 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1596.968022] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "refresh_cache-2b833505-f170-46ea-8d14-c449f88a7d4c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1596.968258] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired lock "refresh_cache-2b833505-f170-46ea-8d14-c449f88a7d4c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1596.968428] env[68194]: DEBUG nova.network.neutron [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1597.007540] env[68194]: DEBUG nova.network.neutron [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1597.195932] env[68194]: DEBUG nova.network.neutron [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Updating instance_info_cache with network_info: [{"id": "bfea196b-820a-4386-8493-bbcc78eea460", "address": "fa:16:3e:5a:05:42", "network": {"id": "0ba2ed75-b752-41fa-b27d-5a564f3d942e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2075431146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7fe2744a0c14564ae1dea9f2653bc4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfea196b-82", "ovs_interfaceid": "bfea196b-820a-4386-8493-bbcc78eea460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1597.208246] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Releasing lock "refresh_cache-2b833505-f170-46ea-8d14-c449f88a7d4c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1597.208731] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Instance network_info: |[{"id": "bfea196b-820a-4386-8493-bbcc78eea460", "address": "fa:16:3e:5a:05:42", "network": {"id": "0ba2ed75-b752-41fa-b27d-5a564f3d942e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2075431146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7fe2744a0c14564ae1dea9f2653bc4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfea196b-82", "ovs_interfaceid": "bfea196b-820a-4386-8493-bbcc78eea460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 1597.210020] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:5a:05:42', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47499d09-8010-4d02-ac96-4f057c104692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bfea196b-820a-4386-8493-bbcc78eea460', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1597.222731] env[68194]: DEBUG oslo.service.loopingcall [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1597.223579] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1597.223910] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-304aaec2-f192-4089-a8e0-1642d1aa1b31 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.252677] env[68194]: DEBUG nova.compute.manager [req-1eaa3488-d1db-4006-831f-857bcd18654c req-91405a99-b039-442a-a1f1-9a39028516e2 service nova] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Received event network-vif-plugged-bfea196b-820a-4386-8493-bbcc78eea460 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1597.252997] env[68194]: DEBUG oslo_concurrency.lockutils [req-1eaa3488-d1db-4006-831f-857bcd18654c req-91405a99-b039-442a-a1f1-9a39028516e2 service nova] Acquiring lock "2b833505-f170-46ea-8d14-c449f88a7d4c-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1597.253345] env[68194]: DEBUG oslo_concurrency.lockutils [req-1eaa3488-d1db-4006-831f-857bcd18654c req-91405a99-b039-442a-a1f1-9a39028516e2 service nova] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1597.253634] env[68194]: DEBUG oslo_concurrency.lockutils [req-1eaa3488-d1db-4006-831f-857bcd18654c req-91405a99-b039-442a-a1f1-9a39028516e2 service nova] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1597.253913] env[68194]: DEBUG nova.compute.manager [req-1eaa3488-d1db-4006-831f-857bcd18654c req-91405a99-b039-442a-a1f1-9a39028516e2 service nova] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] No waiting events found dispatching network-vif-plugged-bfea196b-820a-4386-8493-bbcc78eea460 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1597.254208] env[68194]: WARNING 
nova.compute.manager [req-1eaa3488-d1db-4006-831f-857bcd18654c req-91405a99-b039-442a-a1f1-9a39028516e2 service nova] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Received unexpected event network-vif-plugged-bfea196b-820a-4386-8493-bbcc78eea460 for instance with vm_state building and task_state spawning. [ 1597.260936] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1597.260936] env[68194]: value = "task-3466913" [ 1597.260936] env[68194]: _type = "Task" [ 1597.260936] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.273254] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466913, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1597.775473] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466913, 'name': CreateVM_Task, 'duration_secs': 0.288671} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1597.775746] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1597.777138] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1597.777407] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1597.777876] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1597.778483] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-ac5c5d1b-fe34-44d1-b08a-76a05c149cc8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1597.783284] env[68194]: DEBUG oslo_vmware.api [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 1597.783284] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52182d9c-8097-1c18-86d0-19823894fcdb" [ 1597.783284] env[68194]: _type = "Task" [ 1597.783284] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1597.790801] env[68194]: DEBUG oslo_vmware.api [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52182d9c-8097-1c18-86d0-19823894fcdb, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1598.293175] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1598.293545] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1598.293657] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1599.253535] env[68194]: DEBUG nova.compute.manager [req-67055bd5-bdbf-4bba-9d2b-da3db96b2be1 req-307ec790-77bc-4e23-8312-935e3af1761b service nova] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Received event network-changed-bfea196b-820a-4386-8493-bbcc78eea460 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1599.253629] env[68194]: DEBUG nova.compute.manager [req-67055bd5-bdbf-4bba-9d2b-da3db96b2be1 req-307ec790-77bc-4e23-8312-935e3af1761b service nova] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Refreshing instance network info cache due to event network-changed-bfea196b-820a-4386-8493-bbcc78eea460. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1599.253841] env[68194]: DEBUG oslo_concurrency.lockutils [req-67055bd5-bdbf-4bba-9d2b-da3db96b2be1 req-307ec790-77bc-4e23-8312-935e3af1761b service nova] Acquiring lock "refresh_cache-2b833505-f170-46ea-8d14-c449f88a7d4c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1599.253992] env[68194]: DEBUG oslo_concurrency.lockutils [req-67055bd5-bdbf-4bba-9d2b-da3db96b2be1 req-307ec790-77bc-4e23-8312-935e3af1761b service nova] Acquired lock "refresh_cache-2b833505-f170-46ea-8d14-c449f88a7d4c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1599.254676] env[68194]: DEBUG nova.network.neutron [req-67055bd5-bdbf-4bba-9d2b-da3db96b2be1 req-307ec790-77bc-4e23-8312-935e3af1761b service nova] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Refreshing network info cache for port bfea196b-820a-4386-8493-bbcc78eea460 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1599.500655] env[68194]: DEBUG nova.network.neutron [req-67055bd5-bdbf-4bba-9d2b-da3db96b2be1 req-307ec790-77bc-4e23-8312-935e3af1761b service nova] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Updated VIF entry in instance network info cache for port bfea196b-820a-4386-8493-bbcc78eea460. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1599.501090] env[68194]: DEBUG nova.network.neutron [req-67055bd5-bdbf-4bba-9d2b-da3db96b2be1 req-307ec790-77bc-4e23-8312-935e3af1761b service nova] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Updating instance_info_cache with network_info: [{"id": "bfea196b-820a-4386-8493-bbcc78eea460", "address": "fa:16:3e:5a:05:42", "network": {"id": "0ba2ed75-b752-41fa-b27d-5a564f3d942e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2075431146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.4", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7fe2744a0c14564ae1dea9f2653bc4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbfea196b-82", "ovs_interfaceid": "bfea196b-820a-4386-8493-bbcc78eea460", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1599.510735] env[68194]: DEBUG oslo_concurrency.lockutils [req-67055bd5-bdbf-4bba-9d2b-da3db96b2be1 req-307ec790-77bc-4e23-8312-935e3af1761b service nova] Releasing lock "refresh_cache-2b833505-f170-46ea-8d14-c449f88a7d4c" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1601.416274] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) 
run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1603.416061] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.417287] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1604.417676] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1605.411956] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1605.415573] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1606.416029] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1606.416375] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1606.416375] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1606.439908] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.440127] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.440269] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.440419] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.440560] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.440695] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.440820] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.440944] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.441080] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.441205] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1606.441329] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1606.441849] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1606.452667] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1606.452892] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1606.453079] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1606.453415] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1606.454292] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-52598fd3-540c-4d78-9c70-e32a75298fe7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.462999] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc76e010-4a1c-4288-935f-c0b4124cd998 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.477126] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2da225f3-c3ba-4239-b22e-2b6859ac6e4f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.483474] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46225a8b-8a5c-4efa-989f-183f9ebc1820 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.511996] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180950MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1606.512158] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1606.512342] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1606.597699] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bcb53c97-8d95-4d67-b310-d19087b0b298 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.597865] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.597992] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.598136] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.598260] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.598382] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.598500] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.598620] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.598733] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.598847] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1606.608963] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1606.618422] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance dc7215fa-bc03-464e-81f0-22636be16748 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1606.618644] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1606.618793] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1606.751936] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-64b2599b-ea67-42a8-a714-b7d363cdabc7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.759693] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6f01ca92-1765-4f63-8634-a0a86b8b1397 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.788268] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-482ef705-ece4-49b6-ad53-c3cbe67fb0f5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.795019] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f474db67-9fb7-47bb-8063-6bc3c0efbd68 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1606.807503] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1606.816554] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1606.829744] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1606.829946] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.318s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1612.280295] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "4bcfda9d-e14b-441c-aebb-498dbc10513e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1612.280610] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1637.304623] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "2b833505-f170-46ea-8d14-c449f88a7d4c" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1643.579444] env[68194]: WARNING oslo_vmware.rw_handles [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1643.579444] env[68194]: ERROR oslo_vmware.rw_handles [ 1643.579963] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/b8578300-e289-42f7-bc8b-b2c47cee8ba4/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1643.581911] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 
tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1643.582300] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Copying Virtual Disk [datastore1] vmware_temp/b8578300-e289-42f7-bc8b-b2c47cee8ba4/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/b8578300-e289-42f7-bc8b-b2c47cee8ba4/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1643.582603] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1f2e6c4d-36e6-4992-a7fc-d50f9b6afabf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1643.590800] env[68194]: DEBUG oslo_vmware.api [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Waiting for the task: (returnval){ [ 1643.590800] env[68194]: value = "task-3466914" [ 1643.590800] env[68194]: _type = "Task" [ 1643.590800] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1643.598876] env[68194]: DEBUG oslo_vmware.api [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Task: {'id': task-3466914, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.100479] env[68194]: DEBUG oslo_vmware.exceptions [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1644.100719] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1644.101329] env[68194]: ERROR nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1644.101329] env[68194]: Faults: ['InvalidArgument'] [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Traceback (most recent call last): [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] yield resources [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] self.driver.spawn(context, instance, image_meta, [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] self._fetch_image_if_missing(context, vi) [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] image_cache(vi, tmp_image_ds_loc) [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] vm_util.copy_virtual_disk( [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] session._wait_for_task(vmdk_copy_task) [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] return self.wait_for_task(task_ref) [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] return evt.wait() [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] result = hub.switch() [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] return self.greenlet.switch() [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] self.f(*self.args, **self.kw) [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] raise exceptions.translate_fault(task_info.error) [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Faults: ['InvalidArgument'] [ 1644.101329] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] [ 1644.102466] env[68194]: INFO nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Terminating instance [ 1644.103271] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1644.103501] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1644.103741] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3a22b6c4-0bfb-41cc-9c26-b3ebfaf9d275 
{{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.106129] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1644.106331] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1644.107046] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2474a36-ab57-411f-81cd-0cc26ed8654b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.113444] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1644.113653] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-eb7adbfc-ed81-49e0-9542-d360b2e2a9c8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.115794] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1644.115969] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1644.116911] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5f07b25b-c63b-4794-9f1f-14773a832b15 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.121653] env[68194]: DEBUG oslo_vmware.api [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 1644.121653] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52dd77f2-29e5-d49a-9119-c19cd5c53445" [ 1644.121653] env[68194]: _type = "Task" [ 1644.121653] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.132810] env[68194]: DEBUG oslo_vmware.api [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52dd77f2-29e5-d49a-9119-c19cd5c53445, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.184584] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1644.184813] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1644.184995] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Deleting the datastore file [datastore1] bcb53c97-8d95-4d67-b310-d19087b0b298 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1644.185273] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7cadac65-6c57-4c8b-8b8f-cb24c8914691 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.191872] env[68194]: DEBUG oslo_vmware.api [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Waiting for the task: (returnval){ [ 1644.191872] env[68194]: value = "task-3466916" [ 1644.191872] env[68194]: _type = "Task" [ 1644.191872] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1644.199442] env[68194]: DEBUG oslo_vmware.api [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Task: {'id': task-3466916, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1644.632749] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1644.633061] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating directory with path [datastore1] vmware_temp/45f5c808-bc07-4206-acfc-3c91582b844e/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1644.633330] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-fc4f5211-afe2-46de-9536-d8935c02f40f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.644470] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Created directory with path [datastore1] vmware_temp/45f5c808-bc07-4206-acfc-3c91582b844e/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1644.644654] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Fetch image to [datastore1] vmware_temp/45f5c808-bc07-4206-acfc-3c91582b844e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1644.644825] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/45f5c808-bc07-4206-acfc-3c91582b844e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1644.645547] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c19847d-6ea0-45d8-b31d-0187a6683708 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.651969] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d3850b9-718e-4ccd-bb0c-f3e61ee16b20 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.660647] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3d5e16f-4677-42f9-83a6-e070b0852f05 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.691031] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b1b3994-d49a-42b7-bb59-49aba7a08b2a 
{{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.701051] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ca85cd72-1be0-404b-a1d5-08fe0af4b36f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.702625] env[68194]: DEBUG oslo_vmware.api [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Task: {'id': task-3466916, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074291} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1644.702851] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1644.703039] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1644.703244] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1644.703419] env[68194]: INFO nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1644.705481] env[68194]: DEBUG nova.compute.claims [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1644.705652] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1644.705861] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1644.724920] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1644.788652] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/45f5c808-bc07-4206-acfc-3c91582b844e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1644.846700] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1644.846889] env[68194]: DEBUG oslo_vmware.rw_handles [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/45f5c808-bc07-4206-acfc-3c91582b844e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1644.952540] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-98971c24-bd76-492c-bb02-9a126fee4e64 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.960090] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-207bdb17-e15e-4241-9ee8-5c24de7814aa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.989221] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-836f2ba6-0c22-487a-beeb-31973bdff164 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1644.996262] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b999691-76ae-46fd-aa64-2213934ef060 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.009888] env[68194]: DEBUG nova.compute.provider_tree [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1645.018507] env[68194]: DEBUG nova.scheduler.client.report [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1645.033430] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.327s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1645.033977] env[68194]: ERROR nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1645.033977] env[68194]: Faults: ['InvalidArgument'] [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Traceback (most recent call last): [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 
1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] self.driver.spawn(context, instance, image_meta, [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] self._fetch_image_if_missing(context, vi) [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] image_cache(vi, tmp_image_ds_loc) [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] vm_util.copy_virtual_disk( [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] session._wait_for_task(vmdk_copy_task) [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] return self.wait_for_task(task_ref) [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] return evt.wait() [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] result = hub.switch() [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] return self.greenlet.switch() [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] self.f(*self.args, **self.kw) [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] 
File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] raise exceptions.translate_fault(task_info.error) [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Faults: ['InvalidArgument'] [ 1645.033977] env[68194]: ERROR nova.compute.manager [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] [ 1645.034750] env[68194]: DEBUG nova.compute.utils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1645.035982] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Build of instance bcb53c97-8d95-4d67-b310-d19087b0b298 was re-scheduled: A specified parameter was not correct: fileType [ 1645.035982] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1645.036359] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1645.036536] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1645.036705] env[68194]: DEBUG nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1645.036864] env[68194]: DEBUG nova.network.neutron [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1645.500902] env[68194]: DEBUG nova.network.neutron [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.515214] env[68194]: INFO nova.compute.manager [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Took 0.48 seconds to deallocate network for instance. [ 1645.611166] env[68194]: INFO nova.scheduler.client.report [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Deleted allocations for instance bcb53c97-8d95-4d67-b310-d19087b0b298 [ 1645.637141] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b170f705-203c-4b78-901f-233cec680356 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 623.214s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1645.638514] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 434.172s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1645.638741] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 1645.638911] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1645.639602] env[68194]: DEBUG oslo_concurrency.lockutils [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 425.671s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1645.639834] env[68194]: DEBUG oslo_concurrency.lockutils [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Acquiring lock "bcb53c97-8d95-4d67-b310-d19087b0b298-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1645.640056] env[68194]: DEBUG oslo_concurrency.lockutils [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1645.640232] env[68194]: DEBUG oslo_concurrency.lockutils [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1645.642047] env[68194]: INFO nova.compute.manager [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Terminating instance [ 1645.643725] env[68194]: DEBUG nova.compute.manager [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1645.643935] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1645.644219] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-00b9da3d-563e-48d2-8916-1fb72695d0f8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.651933] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1645.661651] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77c321f3-d819-4b30-9d51-8f73ec3034a7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.691976] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bcb53c97-8d95-4d67-b310-d19087b0b298 could not be found. [ 1645.692213] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1645.692412] env[68194]: INFO nova.compute.manager [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Took 0.05 seconds to destroy the instance on the hypervisor. [ 1645.692670] env[68194]: DEBUG oslo.service.loopingcall [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1645.695247] env[68194]: DEBUG nova.compute.manager [-] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1645.695347] env[68194]: DEBUG nova.network.neutron [-] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1645.711705] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1645.711705] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1645.713114] env[68194]: INFO nova.compute.claims [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1645.723911] env[68194]: DEBUG nova.network.neutron [-] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1645.735358] env[68194]: INFO nova.compute.manager [-] [instance: bcb53c97-8d95-4d67-b310-d19087b0b298] Took 0.04 seconds to deallocate network for instance. 
[ 1645.842889] env[68194]: DEBUG oslo_concurrency.lockutils [None req-05c00054-0ac1-4c21-a9d3-d4c4ac68c028 tempest-ListServerFiltersTestJSON-1619794283 tempest-ListServerFiltersTestJSON-1619794283-project-member] Lock "bcb53c97-8d95-4d67-b310-d19087b0b298" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.203s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1645.930440] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-076ef197-0204-43c8-897b-29ee01521c12 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.938228] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ae49d573-7f5c-46e1-871f-d5268961d0b8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.967462] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4cda1982-21d7-4dc0-abe1-fc55b9a6024d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.973961] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-700f0c06-a47d-405c-846a-381644d9069a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1645.986270] env[68194]: DEBUG nova.compute.provider_tree [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1645.995022] env[68194]: DEBUG nova.scheduler.client.report [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1646.008622] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.297s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1646.009082] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1646.043728] env[68194]: DEBUG nova.compute.utils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1646.045521] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1646.045705] env[68194]: DEBUG nova.network.neutron [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1646.054185] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1646.131403] env[68194]: DEBUG nova.policy [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '905b97edce374ad5a240d61220f66f80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05181674899f44e7bb6d234643c3e6b6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1646.156278] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1646.181571] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1646.181830] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1646.181990] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1646.182192] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1646.182382] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1646.182490] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1646.182700] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1646.182975] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1646.183179] env[68194]: DEBUG nova.virt.hardware [None 
req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1646.183578] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1646.183798] env[68194]: DEBUG nova.virt.hardware [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1646.184807] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5eab1d87-af16-497d-88ec-995950f48cf3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.194764] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db8578c3-b239-440e-8301-e9562eee1e55 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1646.417737] env[68194]: DEBUG nova.network.neutron [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Successfully created port: a1b0c767-b759-4b64-b9b0-56834b319a6c {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1646.976221] env[68194]: DEBUG nova.network.neutron [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Successfully updated port: a1b0c767-b759-4b64-b9b0-56834b319a6c {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1646.988941] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "refresh_cache-d5bc98e3-9621-41bb-90a3-2f8e80c6928b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1646.988941] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "refresh_cache-d5bc98e3-9621-41bb-90a3-2f8e80c6928b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1646.988941] env[68194]: DEBUG nova.network.neutron [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1647.026461] env[68194]: DEBUG nova.network.neutron [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 
tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1647.187259] env[68194]: DEBUG nova.network.neutron [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Updating instance_info_cache with network_info: [{"id": "a1b0c767-b759-4b64-b9b0-56834b319a6c", "address": "fa:16:3e:88:4a:f3", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1b0c767-b7", "ovs_interfaceid": "a1b0c767-b759-4b64-b9b0-56834b319a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.198299] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "refresh_cache-d5bc98e3-9621-41bb-90a3-2f8e80c6928b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1647.198571] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Instance network_info: |[{"id": "a1b0c767-b759-4b64-b9b0-56834b319a6c", "address": "fa:16:3e:88:4a:f3", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1b0c767-b7", "ovs_interfaceid": "a1b0c767-b759-4b64-b9b0-56834b319a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1647.198953] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:88:4a:f3', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'a1b0c767-b759-4b64-b9b0-56834b319a6c', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1647.206560] env[68194]: DEBUG oslo.service.loopingcall [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1647.206976] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1647.207219] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-a3484fc9-dcad-45c8-8cc0-c60f487dafc8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.227490] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1647.227490] env[68194]: value = "task-3466917" [ 1647.227490] env[68194]: _type = "Task" [ 1647.227490] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.235115] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466917, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.540144] env[68194]: DEBUG nova.compute.manager [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Received event network-vif-plugged-a1b0c767-b759-4b64-b9b0-56834b319a6c {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1647.540377] env[68194]: DEBUG oslo_concurrency.lockutils [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] Acquiring lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1647.540591] env[68194]: DEBUG oslo_concurrency.lockutils [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1647.540761] env[68194]: DEBUG oslo_concurrency.lockutils [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1647.540929] env[68194]: DEBUG nova.compute.manager [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] No waiting events found dispatching network-vif-plugged-a1b0c767-b759-4b64-b9b0-56834b319a6c {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1647.541420] env[68194]: WARNING nova.compute.manager [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Received unexpected event network-vif-plugged-a1b0c767-b759-4b64-b9b0-56834b319a6c for instance with vm_state building and task_state spawning. [ 1647.541711] env[68194]: DEBUG nova.compute.manager [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Received event network-changed-a1b0c767-b759-4b64-b9b0-56834b319a6c {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1647.541966] env[68194]: DEBUG nova.compute.manager [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Refreshing instance network info cache due to event network-changed-a1b0c767-b759-4b64-b9b0-56834b319a6c. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1647.542296] env[68194]: DEBUG oslo_concurrency.lockutils [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] Acquiring lock "refresh_cache-d5bc98e3-9621-41bb-90a3-2f8e80c6928b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1647.542584] env[68194]: DEBUG oslo_concurrency.lockutils [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] Acquired lock "refresh_cache-d5bc98e3-9621-41bb-90a3-2f8e80c6928b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1647.542882] env[68194]: DEBUG nova.network.neutron [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Refreshing network info cache for port a1b0c767-b759-4b64-b9b0-56834b319a6c {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1647.737970] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466917, 'name': CreateVM_Task, 'duration_secs': 0.301936} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1647.740412] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1647.741070] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1647.741254] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1647.741571] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1647.742119] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-47fb2376-4017-4c5e-82ab-9841b082dbf4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1647.746871] env[68194]: DEBUG oslo_vmware.api [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 1647.746871] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52707b7d-2047-4df1-c1cc-4f4fd79ff9e9" [ 1647.746871] env[68194]: _type = "Task" [ 1647.746871] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1647.754122] env[68194]: DEBUG oslo_vmware.api [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52707b7d-2047-4df1-c1cc-4f4fd79ff9e9, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1647.823084] env[68194]: DEBUG nova.network.neutron [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Updated VIF entry in instance network info cache for port a1b0c767-b759-4b64-b9b0-56834b319a6c. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1647.823508] env[68194]: DEBUG nova.network.neutron [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Updating instance_info_cache with network_info: [{"id": "a1b0c767-b759-4b64-b9b0-56834b319a6c", "address": "fa:16:3e:88:4a:f3", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.9", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapa1b0c767-b7", "ovs_interfaceid": "a1b0c767-b759-4b64-b9b0-56834b319a6c", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1647.832426] env[68194]: DEBUG oslo_concurrency.lockutils [req-71eb4162-4396-47db-8332-9cbb2c7c4664 req-c07dba77-fdb9-4f3a-8f45-0241633a3987 service nova] Releasing lock "refresh_cache-d5bc98e3-9621-41bb-90a3-2f8e80c6928b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1648.257298] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1648.257635] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1648.257763] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1652.631096] env[68194]: DEBUG oslo_concurrency.lockutils [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1656.804686] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1656.805084] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1661.416603] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1663.416597] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1665.419571] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1665.420190] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1665.420402] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.319287] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1667.319568] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 
tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1667.412143] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.412372] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.439893] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1667.451399] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1667.451655] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1667.451822] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1667.451977] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1667.453078] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4b149a57-8b6d-4a65-998c-1e44eaae1f5b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.461919] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3d810bd9-5ca4-49b2-a4d7-f564fab18dcf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.475533] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61b8c15f-9f56-4b48-9764-7032e20b826a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.481629] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f56226c5-5f65-41bc-9379-c70fb223306e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.511609] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180948MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1667.511781] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1667.511973] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1667.582763] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance b487291e-1b85-4064-9949-3d8895b6dcae actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.582981] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.583176] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.583316] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.583442] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.583565] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.583713] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.583841] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.583961] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.584092] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1667.595117] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance dc7215fa-bc03-464e-81f0-22636be16748 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1667.608275] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4bcfda9d-e14b-441c-aebb-498dbc10513e has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1667.618145] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1667.618377] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1667.618525] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1667.780366] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c35a981-0d55-4bb7-9cf9-1d6f9359cdc4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.787786] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7adcbd36-c7dc-49f4-bd01-73635f2f7c50 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.816830] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-49c3610c-9dc1-401b-832f-62f6b6dc060a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.823683] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db11dd40-8104-4247-9d0f-289c8c658e19 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1667.836671] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1667.844520] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1667.857298] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1667.857477] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.345s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1668.834619] env[68194]: DEBUG oslo_service.periodic_task [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1668.834892] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1668.835014] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1668.859209] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.859492] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.859739] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.859972] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.860223] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.860451] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.860680] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.860927] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.861153] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.861382] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1668.861612] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1693.570673] env[68194]: WARNING oslo_vmware.rw_handles [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1693.570673] env[68194]: ERROR oslo_vmware.rw_handles [ 1693.570673] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/45f5c808-bc07-4206-acfc-3c91582b844e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1693.572577] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1693.572819] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Copying Virtual Disk [datastore1] vmware_temp/45f5c808-bc07-4206-acfc-3c91582b844e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/45f5c808-bc07-4206-acfc-3c91582b844e/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk 
{{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1693.573131] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-058e08ba-f4a7-437d-8b98-ba2caeaf9ebc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1693.581113] env[68194]: DEBUG oslo_vmware.api [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 1693.581113] env[68194]: value = "task-3466918" [ 1693.581113] env[68194]: _type = "Task" [ 1693.581113] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1693.588581] env[68194]: DEBUG oslo_vmware.api [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': task-3466918, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.091484] env[68194]: DEBUG oslo_vmware.exceptions [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1694.091771] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1694.092328] env[68194]: ERROR nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1694.092328] env[68194]: Faults: ['InvalidArgument'] [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Traceback (most recent call last): [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] yield resources [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] self.driver.spawn(context, instance, image_meta, [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1694.092328] 
env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] self._fetch_image_if_missing(context, vi) [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] image_cache(vi, tmp_image_ds_loc) [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] vm_util.copy_virtual_disk( [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] session._wait_for_task(vmdk_copy_task) [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] return self.wait_for_task(task_ref) [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] return evt.wait() [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] result = hub.switch() [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] return self.greenlet.switch() [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] self.f(*self.args, **self.kw) [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] raise exceptions.translate_fault(task_info.error) [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1694.092328] env[68194]: ERROR nova.compute.manager 
[instance: b487291e-1b85-4064-9949-3d8895b6dcae] Faults: ['InvalidArgument'] [ 1694.092328] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] [ 1694.093294] env[68194]: INFO nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Terminating instance [ 1694.094247] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1694.094460] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1694.094694] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d3e0d8c7-5284-4b38-b795-a07525f3df69 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.097070] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1694.097264] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1694.097969] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93f4a41f-b73e-45ba-876b-304b4367a8d0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.104500] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1694.104695] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-66590333-eeb5-4732-a8c4-513962975226 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.106721] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1694.106891] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c 
tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1694.107809] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f51edec1-ce5a-42f8-a2b7-dee5dab75b0c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.112693] env[68194]: DEBUG oslo_vmware.api [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Waiting for the task: (returnval){ [ 1694.112693] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52085911-3906-5dcd-0c2b-d387b654e684" [ 1694.112693] env[68194]: _type = "Task" [ 1694.112693] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.119458] env[68194]: DEBUG oslo_vmware.api [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52085911-3906-5dcd-0c2b-d387b654e684, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.168354] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1694.168531] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1694.168709] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Deleting the datastore file [datastore1] b487291e-1b85-4064-9949-3d8895b6dcae {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1694.168961] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1b8cf6a8-9441-459f-b7ea-a36a92fe95b9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.175409] env[68194]: DEBUG oslo_vmware.api [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 1694.175409] env[68194]: value = "task-3466920" [ 1694.175409] env[68194]: _type = "Task" [ 1694.175409] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1694.182926] env[68194]: DEBUG oslo_vmware.api [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': task-3466920, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1694.623289] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1694.623596] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Creating directory with path [datastore1] vmware_temp/16d70dd5-b121-495b-a07b-435f02592f6d/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1694.623723] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d7830b4f-77b1-4710-ad5d-f3890f57bdab {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.635704] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Created directory with path [datastore1] vmware_temp/16d70dd5-b121-495b-a07b-435f02592f6d/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1694.635916] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Fetch image to [datastore1] vmware_temp/16d70dd5-b121-495b-a07b-435f02592f6d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1694.636149] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/16d70dd5-b121-495b-a07b-435f02592f6d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1694.636926] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4c8eba02-1c0f-4598-b2e9-eeb4a6ab203f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.643563] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-95079c32-6998-459e-b2bf-01e344f601fd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.652610] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a1a706b8-d332-41ad-9f95-3d6e6cb0120e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.685969] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8617b0bb-0a92-4719-a790-6cff475d63ae {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.692590] env[68194]: DEBUG oslo_vmware.api [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': task-3466920, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076771} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1694.693939] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1694.694159] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1694.694355] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1694.694535] env[68194]: INFO nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Took 0.60 seconds to destroy the instance on the hypervisor. 
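The DeleteDatastoreFile_Task above (task-3466920) finishes through the same poll-until-done pattern that the wait_for_task/_poll_task entries show throughout this log: submit the vCenter task, poll its state, return on success, raise a translated fault on error. What follows is a minimal sketch of that pattern in plain Python rather than the real oslo.vmware API; TaskInfo, get_task_info and the interval/timeout values are hypothetical stand-ins for illustration only.

import time
from dataclasses import dataclass
from typing import Callable, Optional

@dataclass
class TaskInfo:
    state: str                 # "queued" | "running" | "success" | "error"
    progress: int = 0          # percent complete, as in the "progress is 0%" entries above
    error: Optional[str] = None

def wait_for_task(get_task_info: Callable[[str], TaskInfo],
                  task_ref: str,
                  interval: float = 0.5,
                  timeout: float = 60.0) -> TaskInfo:
    # Poll the task until it finishes, fails, or the timeout expires.
    deadline = time.monotonic() + timeout
    while time.monotonic() < deadline:
        info = get_task_info(task_ref)        # e.g. "task-3466920"
        if info.state == "success":
            return info
        if info.state == "error":
            raise RuntimeError(f"{task_ref} failed: {info.error}")
        time.sleep(interval)                  # still queued/running: poll again
    raise TimeoutError(f"{task_ref} did not complete within {timeout}s")

Fed a fake get_task_info that reports "running" once and then "success", the loop returns after a single sleep; the real driver instead raises a fault-specific exception on error, which is how the VimFaultException ("A specified parameter was not correct: fileType") seen earlier surfaces from the CopyVirtualDisk_Task.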
[ 1694.696279] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-38f3dbf6-9f7d-428e-abd8-e8811fbd69d9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.698103] env[68194]: DEBUG nova.compute.claims [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1694.698281] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1694.698515] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1694.718669] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1694.771400] env[68194]: DEBUG oslo_vmware.rw_handles [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/16d70dd5-b121-495b-a07b-435f02592f6d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1694.832941] env[68194]: DEBUG oslo_vmware.rw_handles [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1694.833143] env[68194]: DEBUG oslo_vmware.rw_handles [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/16d70dd5-b121-495b-a07b-435f02592f6d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1694.962328] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa7dba88-1ca6-4dd5-9153-91e72d0bd4ef {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.969665] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-18851de7-5fc9-4f18-8fab-543aab55d1d6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1694.998404] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eb1400e0-3c68-4ecb-b06c-00acfcd59a28 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.004948] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c0d04741-788b-4741-bb42-51d102e7754a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.018144] env[68194]: DEBUG nova.compute.provider_tree [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1695.026299] env[68194]: DEBUG nova.scheduler.client.report [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1695.039585] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.341s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1695.040094] env[68194]: ERROR nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1695.040094] env[68194]: Faults: ['InvalidArgument'] [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Traceback (most recent call last): [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1695.040094] env[68194]: ERROR 
nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] self.driver.spawn(context, instance, image_meta, [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] self._fetch_image_if_missing(context, vi) [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] image_cache(vi, tmp_image_ds_loc) [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] vm_util.copy_virtual_disk( [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] session._wait_for_task(vmdk_copy_task) [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] return self.wait_for_task(task_ref) [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] return evt.wait() [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] result = hub.switch() [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] return self.greenlet.switch() [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] self.f(*self.args, **self.kw) [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] raise exceptions.translate_fault(task_info.error) [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Faults: ['InvalidArgument'] [ 1695.040094] env[68194]: ERROR nova.compute.manager [instance: b487291e-1b85-4064-9949-3d8895b6dcae] [ 1695.040995] env[68194]: DEBUG nova.compute.utils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1695.042129] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Build of instance b487291e-1b85-4064-9949-3d8895b6dcae was re-scheduled: A specified parameter was not correct: fileType [ 1695.042129] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1695.042527] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1695.042705] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1695.042876] env[68194]: DEBUG nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1695.043052] env[68194]: DEBUG nova.network.neutron [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1695.343314] env[68194]: DEBUG nova.network.neutron [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.355137] env[68194]: INFO nova.compute.manager [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Took 0.31 seconds to deallocate network for instance. [ 1695.444899] env[68194]: INFO nova.scheduler.client.report [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Deleted allocations for instance b487291e-1b85-4064-9949-3d8895b6dcae [ 1695.470410] env[68194]: DEBUG oslo_concurrency.lockutils [None req-8cdc7e16-0434-40d6-9d7e-2e6bef6a00ad tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "b487291e-1b85-4064-9949-3d8895b6dcae" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 632.023s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1695.471783] env[68194]: DEBUG oslo_concurrency.lockutils [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "b487291e-1b85-4064-9949-3d8895b6dcae" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 436.203s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1695.472085] env[68194]: DEBUG oslo_concurrency.lockutils [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "b487291e-1b85-4064-9949-3d8895b6dcae-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1695.472322] env[68194]: DEBUG oslo_concurrency.lockutils [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "b487291e-1b85-4064-9949-3d8895b6dcae-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1695.472500] env[68194]: DEBUG oslo_concurrency.lockutils [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "b487291e-1b85-4064-9949-3d8895b6dcae-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1695.475721] env[68194]: INFO nova.compute.manager [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Terminating instance [ 1695.477499] env[68194]: DEBUG nova.compute.manager [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1695.477698] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1695.477980] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-07b30341-775a-476a-9a44-57be98d52735 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.488031] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f272de76-034d-4097-a0c3-cced619fedf4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.498236] env[68194]: DEBUG nova.compute.manager [None req-7f14011d-8ba7-45f5-a611-3321fa27a1f8 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] [instance: dc7215fa-bc03-464e-81f0-22636be16748] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1695.518322] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance b487291e-1b85-4064-9949-3d8895b6dcae could not be found. [ 1695.518526] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1695.518747] env[68194]: INFO nova.compute.manager [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Took 0.04 seconds to destroy the instance on the hypervisor. 
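Every lockutils entry in this log records how long the caller waited for and then held a named lock; the terminate path above, for example, waited 436.203s for lock "b487291e-1b85-4064-9949-3d8895b6dcae" and later held it for only 0.175s. The sketch below reproduces that wait/held bookkeeping with a plain threading.Lock wrapper for illustration; it is not the oslo.concurrency implementation, and the logger name is invented.

import logging
import threading
import time
from contextlib import contextmanager

LOG = logging.getLogger("lock_timing_sketch")   # invented logger name
_locks = {}                                     # lock name -> threading.Lock

@contextmanager
def timed_lock(name, caller):
    # Acquire a named lock and report wait/held durations like the entries above.
    lock = _locks.setdefault(name, threading.Lock())
    start = time.monotonic()
    lock.acquire()
    waited = time.monotonic() - start
    LOG.debug('Lock "%s" acquired by "%s" :: waited %.3fs', name, caller, waited)
    held_from = time.monotonic()
    try:
        yield
    finally:
        lock.release()
        held = time.monotonic() - held_from
        LOG.debug('Lock "%s" "released" by "%s" :: held %.3fs', name, caller, held)

# e.g. serializing per-instance work, much as the compute manager does by UUID:
# with timed_lock("b487291e-1b85-4064-9949-3d8895b6dcae-events", "_clear_events"):
#     ...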
[ 1695.519018] env[68194]: DEBUG oslo.service.loopingcall [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1695.519248] env[68194]: DEBUG nova.compute.manager [-] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1695.519348] env[68194]: DEBUG nova.network.neutron [-] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1695.525955] env[68194]: DEBUG nova.compute.manager [None req-7f14011d-8ba7-45f5-a611-3321fa27a1f8 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] [instance: dc7215fa-bc03-464e-81f0-22636be16748] Instance disappeared before build. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2413}} [ 1695.543435] env[68194]: DEBUG nova.network.neutron [-] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1695.550577] env[68194]: INFO nova.compute.manager [-] [instance: b487291e-1b85-4064-9949-3d8895b6dcae] Took 0.03 seconds to deallocate network for instance. [ 1695.551620] env[68194]: DEBUG oslo_concurrency.lockutils [None req-7f14011d-8ba7-45f5-a611-3321fa27a1f8 tempest-ImagesTestJSON-1210485823 tempest-ImagesTestJSON-1210485823-project-member] Lock "dc7215fa-bc03-464e-81f0-22636be16748" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 205.377s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1695.577294] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1695.631797] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1695.632111] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1695.635075] env[68194]: INFO nova.compute.claims [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1695.647049] env[68194]: DEBUG oslo_concurrency.lockutils [None req-802d3501-1c96-4298-8217-41e95bf06f9d tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "b487291e-1b85-4064-9949-3d8895b6dcae" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.175s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1695.810465] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a11f968f-48d5-4f6d-82d6-716088ff1943 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.818327] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-84df6b5b-3902-4daa-ac4a-7b72a51f8208 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.849059] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-67299706-d399-4986-8c1b-14d8141d6c79 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.854314] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-560c9ddb-8bfb-455e-a09d-c881bab17391 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1695.867425] env[68194]: DEBUG nova.compute.provider_tree [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1695.875501] env[68194]: DEBUG nova.scheduler.client.report [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 
1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1695.891220] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.259s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1695.891738] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1695.922485] env[68194]: DEBUG nova.compute.utils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1695.924065] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1695.924279] env[68194]: DEBUG nova.network.neutron [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1695.933030] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Start building block device mappings for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1695.993754] env[68194]: DEBUG nova.policy [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '558d53533dd4415a9c3c66f01d3ce6d4', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'a0fb97649d1045689a80d83477a6f25e', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1695.996913] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1696.022986] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1696.023258] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1696.023457] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1696.023597] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1696.023743] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1696.023890] env[68194]: DEBUG nova.virt.hardware [None 
req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1696.024111] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1696.024304] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1696.024542] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1696.024721] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1696.024895] env[68194]: DEBUG nova.virt.hardware [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1696.025757] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7acad931-ae51-4acb-84f6-885b8a14e526 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.033384] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8619b661-b964-42bc-a74e-938179b73bdd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1696.298258] env[68194]: DEBUG nova.network.neutron [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Successfully created port: bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1696.586113] env[68194]: DEBUG nova.network.neutron [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Successfully created port: f40a51a6-b4ae-4934-b030-b2eb91a36ec1 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1697.132205] env[68194]: DEBUG nova.network.neutron [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 
tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Successfully updated port: bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1697.386373] env[68194]: DEBUG nova.compute.manager [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Received event network-vif-plugged-bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1697.386373] env[68194]: DEBUG oslo_concurrency.lockutils [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] Acquiring lock "4bcfda9d-e14b-441c-aebb-498dbc10513e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1697.386373] env[68194]: DEBUG oslo_concurrency.lockutils [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1697.386373] env[68194]: DEBUG oslo_concurrency.lockutils [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1697.386373] env[68194]: DEBUG nova.compute.manager [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] No waiting events found dispatching network-vif-plugged-bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1697.386373] env[68194]: WARNING nova.compute.manager [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Received unexpected event network-vif-plugged-bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a for instance with vm_state building and task_state spawning. [ 1697.386373] env[68194]: DEBUG nova.compute.manager [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Received event network-changed-bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1697.386373] env[68194]: DEBUG nova.compute.manager [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Refreshing instance network info cache due to event network-changed-bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1697.386840] env[68194]: DEBUG oslo_concurrency.lockutils [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] Acquiring lock "refresh_cache-4bcfda9d-e14b-441c-aebb-498dbc10513e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1697.387153] env[68194]: DEBUG oslo_concurrency.lockutils [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] Acquired lock "refresh_cache-4bcfda9d-e14b-441c-aebb-498dbc10513e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1697.387447] env[68194]: DEBUG nova.network.neutron [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Refreshing network info cache for port bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1697.446599] env[68194]: DEBUG nova.network.neutron [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1697.587338] env[68194]: DEBUG nova.network.neutron [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1697.597084] env[68194]: DEBUG oslo_concurrency.lockutils [req-87b9607d-5f38-4bb5-9ce1-1847358be91f req-4d87354b-ebbd-47f9-a5e1-ba1385416c27 service nova] Releasing lock "refresh_cache-4bcfda9d-e14b-441c-aebb-498dbc10513e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1697.759752] env[68194]: DEBUG nova.network.neutron [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Successfully updated port: f40a51a6-b4ae-4934-b030-b2eb91a36ec1 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1697.767988] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "refresh_cache-4bcfda9d-e14b-441c-aebb-498dbc10513e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1697.768159] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquired lock "refresh_cache-4bcfda9d-e14b-441c-aebb-498dbc10513e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1697.768314] env[68194]: DEBUG nova.network.neutron [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Building network info cache for instance {{(pid=68194) _get_instance_nw_info 
/opt/stack/nova/nova/network/neutron.py:2010}} [ 1697.807891] env[68194]: DEBUG nova.network.neutron [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1698.286711] env[68194]: DEBUG nova.network.neutron [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Updating instance_info_cache with network_info: [{"id": "bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a", "address": "fa:16:3e:6f:e2:09", "network": {"id": "b7c1595a-f83b-427f-a31f-4f69a16f36e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1068248310", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf7e5af-8e", "ovs_interfaceid": "bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f40a51a6-b4ae-4934-b030-b2eb91a36ec1", "address": "fa:16:3e:33:46:51", "network": {"id": "1e4b01ab-d309-457b-8089-9b73759346fd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-838167154", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf40a51a6-b4", "ovs_interfaceid": "f40a51a6-b4ae-4934-b030-b2eb91a36ec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1698.300547] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Releasing lock "refresh_cache-4bcfda9d-e14b-441c-aebb-498dbc10513e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1698.300906] env[68194]: DEBUG 
nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Instance network_info: |[{"id": "bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a", "address": "fa:16:3e:6f:e2:09", "network": {"id": "b7c1595a-f83b-427f-a31f-4f69a16f36e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1068248310", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf7e5af-8e", "ovs_interfaceid": "bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f40a51a6-b4ae-4934-b030-b2eb91a36ec1", "address": "fa:16:3e:33:46:51", "network": {"id": "1e4b01ab-d309-457b-8089-9b73759346fd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-838167154", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf40a51a6-b4", "ovs_interfaceid": "f40a51a6-b4ae-4934-b030-b2eb91a36ec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1698.301345] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6f:e2:09', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '5e839c46-1ae9-43b7-9518-8f18f48100dd', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a', 'vif_model': 'vmxnet3'}, {'network_name': 'br-int', 'mac_address': 'fa:16:3e:33:46:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2321dbbe-f64a-4253-a462-21676f8a278e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f40a51a6-b4ae-4934-b030-b2eb91a36ec1', 'vif_model': 'vmxnet3'}] {{(pid=68194) 
build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1698.310476] env[68194]: DEBUG oslo.service.loopingcall [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1698.310938] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1698.311187] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-51b09b19-f1b2-4cde-a499-ce356b7ef682 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.333807] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1698.333807] env[68194]: value = "task-3466921" [ 1698.333807] env[68194]: _type = "Task" [ 1698.333807] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.341636] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466921, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1698.844294] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466921, 'name': CreateVM_Task, 'duration_secs': 0.34287} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1698.844452] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1698.845217] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1698.845431] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1698.845706] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1698.845947] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a429771f-8c12-4246-a839-c57cdd141e74 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1698.850259] env[68194]: DEBUG oslo_vmware.api [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa 
tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for the task: (returnval){ [ 1698.850259] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5268688a-acc6-dd88-3972-dbf5731ac875" [ 1698.850259] env[68194]: _type = "Task" [ 1698.850259] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1698.859135] env[68194]: DEBUG oslo_vmware.api [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5268688a-acc6-dd88-3972-dbf5731ac875, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1699.362541] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1699.362819] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1699.363018] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1699.432648] env[68194]: DEBUG nova.compute.manager [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Received event network-vif-plugged-f40a51a6-b4ae-4934-b030-b2eb91a36ec1 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1699.432815] env[68194]: DEBUG oslo_concurrency.lockutils [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] Acquiring lock "4bcfda9d-e14b-441c-aebb-498dbc10513e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1699.432987] env[68194]: DEBUG oslo_concurrency.lockutils [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1699.433141] env[68194]: DEBUG oslo_concurrency.lockutils [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1699.433314] env[68194]: DEBUG nova.compute.manager [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] No waiting events found dispatching network-vif-plugged-f40a51a6-b4ae-4934-b030-b2eb91a36ec1 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1699.433480] env[68194]: WARNING nova.compute.manager [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Received unexpected event network-vif-plugged-f40a51a6-b4ae-4934-b030-b2eb91a36ec1 for instance with vm_state building and task_state spawning. [ 1699.433640] env[68194]: DEBUG nova.compute.manager [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Received event network-changed-f40a51a6-b4ae-4934-b030-b2eb91a36ec1 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1699.433796] env[68194]: DEBUG nova.compute.manager [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Refreshing instance network info cache due to event network-changed-f40a51a6-b4ae-4934-b030-b2eb91a36ec1. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1699.433983] env[68194]: DEBUG oslo_concurrency.lockutils [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] Acquiring lock "refresh_cache-4bcfda9d-e14b-441c-aebb-498dbc10513e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1699.434133] env[68194]: DEBUG oslo_concurrency.lockutils [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] Acquired lock "refresh_cache-4bcfda9d-e14b-441c-aebb-498dbc10513e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1699.434373] env[68194]: DEBUG nova.network.neutron [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Refreshing network info cache for port f40a51a6-b4ae-4934-b030-b2eb91a36ec1 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1699.665714] env[68194]: DEBUG nova.network.neutron [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Updated VIF entry in instance network info cache for port f40a51a6-b4ae-4934-b030-b2eb91a36ec1. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1699.666174] env[68194]: DEBUG nova.network.neutron [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Updating instance_info_cache with network_info: [{"id": "bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a", "address": "fa:16:3e:6f:e2:09", "network": {"id": "b7c1595a-f83b-427f-a31f-4f69a16f36e3", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-1068248310", "subnets": [{"cidr": "192.168.128.0/24", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.245", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "5e839c46-1ae9-43b7-9518-8f18f48100dd", "external-id": "nsx-vlan-transportzone-666", "segmentation_id": 666, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapbcf7e5af-8e", "ovs_interfaceid": "bcf7e5af-8ea8-43ae-9653-8b4b4339bf4a", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}, {"id": "f40a51a6-b4ae-4934-b030-b2eb91a36ec1", "address": "fa:16:3e:33:46:51", "network": {"id": "1e4b01ab-d309-457b-8089-9b73759346fd", "bridge": "br-int", "label": "tempest-ServersTestMultiNic-838167154", "subnets": [{"cidr": "192.168.129.0/24", "dns": [], "gateway": {"address": "192.168.129.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.129.6", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.129.2"}}], "meta": {"injected": false, "tenant_id": "a0fb97649d1045689a80d83477a6f25e", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2321dbbe-f64a-4253-a462-21676f8a278e", "external-id": "nsx-vlan-transportzone-714", "segmentation_id": 714, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf40a51a6-b4", "ovs_interfaceid": "f40a51a6-b4ae-4934-b030-b2eb91a36ec1", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1699.675938] env[68194]: DEBUG oslo_concurrency.lockutils [req-9f168286-ff24-4821-9539-5e3fbf4df0e3 req-d7f7d6b4-4806-425c-8e58-4de79ca8a045 service nova] Releasing lock "refresh_cache-4bcfda9d-e14b-441c-aebb-498dbc10513e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1708.543889] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1708.544195] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1708.718187] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1708.718187] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1718.416299] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1718.416631] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1721.416583] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1724.418311] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1725.416957] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1726.417767] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1727.415732] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.412376] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.416035] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.416204] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1728.416329] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1728.438187] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.438357] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.438489] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.438616] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.438739] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.438860] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.438980] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.439111] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.439232] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.439349] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1728.439468] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1728.439943] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1728.450853] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1728.451072] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1728.451244] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1728.451397] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1728.452471] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1343a6d5-3143-40aa-bb45-90844ae4f379 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.461033] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25158846-4ebd-40c6-9871-9a04d6163e73 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.474275] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14f5e134-d49d-443a-806c-bd534e11d086 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.480071] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-848ed7af-7027-4ae2-bff9-77524aa723c3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.508772] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180969MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1728.508911] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
1728.509116] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1728.576229] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.576423] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.576565] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.576736] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.576864] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.576988] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.577124] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.577244] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.577360] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.577476] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4bcfda9d-e14b-441c-aebb-498dbc10513e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1728.589282] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1728.599028] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1728.608126] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1728.608339] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1728.608489] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1728.755635] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0b2464fa-7985-4b65-8c8c-0b8c83d9edfd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.762999] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7f0d02ec-6cfe-40b9-83d4-bec84e3db4c5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.791715] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e43c9352-a05c-4dff-9218-0a8afe92efa6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.798262] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88580d7a-ccef-4c43-a8ab-fb382fdacb63 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1728.811671] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1728.822470] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1728.834998] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1728.835193] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.326s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1743.588975] env[68194]: WARNING oslo_vmware.rw_handles [None 
req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1743.588975] env[68194]: ERROR oslo_vmware.rw_handles [ 1743.589568] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/16d70dd5-b121-495b-a07b-435f02592f6d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1743.591234] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1743.591490] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Copying Virtual Disk [datastore1] vmware_temp/16d70dd5-b121-495b-a07b-435f02592f6d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/16d70dd5-b121-495b-a07b-435f02592f6d/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1743.591908] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-33648484-6b6f-4d2d-a316-9da2606602b9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1743.599959] env[68194]: DEBUG oslo_vmware.api [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Waiting for the task: (returnval){ [ 1743.599959] env[68194]: value = "task-3466922" [ 1743.599959] env[68194]: _type = "Task" [ 1743.599959] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1743.607738] env[68194]: DEBUG oslo_vmware.api [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Task: {'id': task-3466922, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.110584] env[68194]: DEBUG oslo_vmware.exceptions [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1744.110873] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1744.111488] env[68194]: ERROR nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1744.111488] env[68194]: Faults: ['InvalidArgument'] [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Traceback (most recent call last): [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] yield resources [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] self.driver.spawn(context, instance, image_meta, [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] self._fetch_image_if_missing(context, vi) [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] image_cache(vi, tmp_image_ds_loc) [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File 
"/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] vm_util.copy_virtual_disk( [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] session._wait_for_task(vmdk_copy_task) [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] return self.wait_for_task(task_ref) [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] return evt.wait() [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] result = hub.switch() [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] return self.greenlet.switch() [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] self.f(*self.args, **self.kw) [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] raise exceptions.translate_fault(task_info.error) [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Faults: ['InvalidArgument'] [ 1744.111488] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] [ 1744.112790] env[68194]: INFO nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Terminating instance [ 1744.113340] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquired lock "[datastore1] 
devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1744.113558] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1744.113798] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-40d29108-e122-4df1-a250-05400d2f07a7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.118580] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1744.118774] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1744.119538] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4070772-e46f-4ba0-92c8-f3087dd87e0b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.122962] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1744.123141] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1744.124196] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-efb3ff30-4392-4cdb-9c2f-04debd485f7d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.128340] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1744.128812] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-0e611931-d2e9-41f8-8fcc-77ad7f9ad134 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.131278] env[68194]: DEBUG oslo_vmware.api [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for the task: (returnval){ [ 1744.131278] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]529e78bc-6803-1f01-57d6-7d30ba181cc8" [ 1744.131278] env[68194]: _type = "Task" [ 1744.131278] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.143798] env[68194]: DEBUG oslo_vmware.api [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]529e78bc-6803-1f01-57d6-7d30ba181cc8, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.191064] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1744.191301] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1744.191487] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Deleting the datastore file [datastore1] 4fb56c2b-1556-479e-9d4e-136a8d1d15ad {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1744.191751] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-47e31429-b93c-4a93-b36c-9fd40f15cc1e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.198285] env[68194]: DEBUG oslo_vmware.api [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Waiting for the task: (returnval){ [ 1744.198285] env[68194]: value = "task-3466924" [ 1744.198285] env[68194]: _type = "Task" [ 1744.198285] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1744.205883] env[68194]: DEBUG oslo_vmware.api [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Task: {'id': task-3466924, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1744.641365] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1744.641689] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Creating directory with path [datastore1] vmware_temp/06b72d41-a381-48f7-bb82-1a4dd434862f/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1744.641962] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-32be12b9-220f-42de-bbdd-41a70f296f75 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.654214] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Created directory with path [datastore1] vmware_temp/06b72d41-a381-48f7-bb82-1a4dd434862f/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1744.654394] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Fetch image to [datastore1] vmware_temp/06b72d41-a381-48f7-bb82-1a4dd434862f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1744.654545] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/06b72d41-a381-48f7-bb82-1a4dd434862f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1744.655314] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b80fb3e2-0630-4e4b-a1e1-3e1442c8ced6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.661512] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-339642d3-d3ec-4edd-afc7-9a0d9b82fb86 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.670103] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61347962-aae8-4b49-bc93-495010fe382b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.702874] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca047953-9684-4cf7-be09-086630b3d840 {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.711437] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e2e894a2-aa08-407d-b5e7-92394fca9b0a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.713078] env[68194]: DEBUG oslo_vmware.api [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Task: {'id': task-3466924, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075318} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1744.713322] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1744.713502] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1744.713673] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1744.713848] env[68194]: INFO nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1744.715970] env[68194]: DEBUG nova.compute.claims [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1744.716160] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1744.716371] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1744.735176] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1744.785705] env[68194]: DEBUG oslo_vmware.rw_handles [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/06b72d41-a381-48f7-bb82-1a4dd434862f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1744.844007] env[68194]: DEBUG oslo_vmware.rw_handles [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1744.844210] env[68194]: DEBUG oslo_vmware.rw_handles [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/06b72d41-a381-48f7-bb82-1a4dd434862f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1744.956879] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-57e1f2d0-fe7a-4d38-a18e-02ba2bc8ba53 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.964399] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2600324c-44ae-4368-8266-82ca70782ed7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1744.993746] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-285c2d6d-0408-4d5b-b82e-0698876d602c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.000738] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec22eeee-c287-42cd-9d83-ed64c73d193a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.014882] env[68194]: DEBUG nova.compute.provider_tree [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.023197] env[68194]: DEBUG nova.scheduler.client.report [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1745.036728] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.320s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1745.037275] env[68194]: ERROR nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1745.037275] env[68194]: Faults: ['InvalidArgument'] [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Traceback (most recent call last): [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 
4fb56c2b-1556-479e-9d4e-136a8d1d15ad] self.driver.spawn(context, instance, image_meta, [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] self._fetch_image_if_missing(context, vi) [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] image_cache(vi, tmp_image_ds_loc) [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] vm_util.copy_virtual_disk( [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] session._wait_for_task(vmdk_copy_task) [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] return self.wait_for_task(task_ref) [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] return evt.wait() [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] result = hub.switch() [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] return self.greenlet.switch() [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] self.f(*self.args, **self.kw) [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] raise exceptions.translate_fault(task_info.error) [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Faults: ['InvalidArgument'] [ 1745.037275] env[68194]: ERROR nova.compute.manager [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] [ 1745.038081] env[68194]: DEBUG nova.compute.utils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1745.039649] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Build of instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad was re-scheduled: A specified parameter was not correct: fileType [ 1745.039649] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1745.040075] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1745.040259] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1745.040429] env[68194]: DEBUG nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1745.040590] env[68194]: DEBUG nova.network.neutron [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1745.357221] env[68194]: DEBUG nova.network.neutron [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1745.368144] env[68194]: INFO nova.compute.manager [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Took 0.33 seconds to deallocate network for instance. [ 1745.473322] env[68194]: INFO nova.scheduler.client.report [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Deleted allocations for instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad [ 1745.501364] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a6ae175e-0612-40c5-a32f-603a97fb0f3c tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 587.219s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1745.502605] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 391.237s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1745.502905] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Acquiring lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1745.503221] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1745.503446] env[68194]: 
DEBUG oslo_concurrency.lockutils [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1745.505529] env[68194]: INFO nova.compute.manager [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Terminating instance [ 1745.508568] env[68194]: DEBUG nova.compute.manager [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1745.508906] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1745.509181] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-67d2d3da-a4d9-4046-bc5f-9c3be6dc11d1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.521248] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e377b5c-dcfb-4d14-82da-90e7bd1a2702 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.531591] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1745.552922] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4fb56c2b-1556-479e-9d4e-136a8d1d15ad could not be found. [ 1745.552922] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1745.552922] env[68194]: INFO nova.compute.manager [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1745.552922] env[68194]: DEBUG oslo.service.loopingcall [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1745.553229] env[68194]: DEBUG nova.compute.manager [-] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1745.553284] env[68194]: DEBUG nova.network.neutron [-] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1745.585263] env[68194]: DEBUG nova.network.neutron [-] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1745.590167] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1745.590585] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1745.592211] env[68194]: INFO nova.compute.claims [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1745.595681] env[68194]: INFO nova.compute.manager [-] [instance: 4fb56c2b-1556-479e-9d4e-136a8d1d15ad] Took 0.04 seconds to deallocate network for instance. 
[ 1745.694393] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2ce7bdd7-1c2b-46d6-975c-4d827174a04a tempest-ServersTestJSON-440377855 tempest-ServersTestJSON-440377855-project-member] Lock "4fb56c2b-1556-479e-9d4e-136a8d1d15ad" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.192s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1745.805909] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28e7669f-284a-4e68-8973-e36680e55e36 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.813872] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b0071066-2dc3-4dea-b629-7480630e1a22 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.843542] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c8ee807c-ff73-4906-8013-6f3e5cd249f6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.851043] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1531b0a7-e0f9-49d6-b1db-58d91c604e37 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1745.864355] env[68194]: DEBUG nova.compute.provider_tree [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1745.873276] env[68194]: DEBUG nova.scheduler.client.report [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1745.888662] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.298s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1745.889145] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1745.928121] env[68194]: DEBUG nova.compute.utils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1745.930018] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1745.930208] env[68194]: DEBUG nova.network.neutron [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1745.938589] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1745.992726] env[68194]: DEBUG nova.policy [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ee71b35a8e64d6d8fb2a7da304db996', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e921c94dac1c4681afddfdf2902d672a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1746.027939] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1746.059942] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1746.060215] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1746.060377] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1746.060560] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1746.060706] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1746.060848] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1746.061240] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1746.061477] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1746.061477] env[68194]: DEBUG 
nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1746.061616] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1746.061800] env[68194]: DEBUG nova.virt.hardware [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1746.062842] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ddacd284-76ea-4c9d-b447-9c6c39ed40fb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.072318] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-236c340f-3259-4215-9335-5eb26a8c07d1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1746.325745] env[68194]: DEBUG nova.network.neutron [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Successfully created port: f7e12679-eaad-4cab-811e-920405ca3359 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1746.882818] env[68194]: DEBUG nova.network.neutron [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Successfully updated port: f7e12679-eaad-4cab-811e-920405ca3359 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1746.894211] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "refresh_cache-bd9479c5-a9f5-47a6-b731-f0bf4633b688" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1746.894389] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquired lock "refresh_cache-bd9479c5-a9f5-47a6-b731-f0bf4633b688" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1746.894545] env[68194]: DEBUG nova.network.neutron [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1746.931605] env[68194]: DEBUG nova.network.neutron [None 
req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1747.090936] env[68194]: DEBUG nova.network.neutron [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Updating instance_info_cache with network_info: [{"id": "f7e12679-eaad-4cab-811e-920405ca3359", "address": "fa:16:3e:75:62:05", "network": {"id": "e47632ac-218f-49b6-baca-b148ca44ea7b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1967602305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e921c94dac1c4681afddfdf2902d672a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7e12679-ea", "ovs_interfaceid": "f7e12679-eaad-4cab-811e-920405ca3359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.106411] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Releasing lock "refresh_cache-bd9479c5-a9f5-47a6-b731-f0bf4633b688" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1747.106733] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Instance network_info: |[{"id": "f7e12679-eaad-4cab-811e-920405ca3359", "address": "fa:16:3e:75:62:05", "network": {"id": "e47632ac-218f-49b6-baca-b148ca44ea7b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1967602305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e921c94dac1c4681afddfdf2902d672a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7e12679-ea", "ovs_interfaceid": 
"f7e12679-eaad-4cab-811e-920405ca3359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1747.107200] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:75:62:05', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'f7e12679-eaad-4cab-811e-920405ca3359', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1747.115224] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Creating folder: Project (e921c94dac1c4681afddfdf2902d672a). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1747.115925] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-c3c7e1f9-7911-421b-bf9d-88734bee32e2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.127475] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Created folder: Project (e921c94dac1c4681afddfdf2902d672a) in parent group-v692426. [ 1747.127662] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Creating folder: Instances. Parent ref: group-v692528. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1747.127887] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-9489274a-4261-442b-8a49-a7a029826fdd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.137136] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Created folder: Instances in parent group-v692528. [ 1747.137391] env[68194]: DEBUG oslo.service.loopingcall [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1747.137690] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1747.137770] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b47ff141-3541-41e3-b6bd-5df7934755d9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.157620] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1747.157620] env[68194]: value = "task-3466927" [ 1747.157620] env[68194]: _type = "Task" [ 1747.157620] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.165404] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466927, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1747.398264] env[68194]: DEBUG nova.compute.manager [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Received event network-vif-plugged-f7e12679-eaad-4cab-811e-920405ca3359 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1747.398538] env[68194]: DEBUG oslo_concurrency.lockutils [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] Acquiring lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1747.398826] env[68194]: DEBUG oslo_concurrency.lockutils [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1747.399155] env[68194]: DEBUG oslo_concurrency.lockutils [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1747.399354] env[68194]: DEBUG nova.compute.manager [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] No waiting events found dispatching network-vif-plugged-f7e12679-eaad-4cab-811e-920405ca3359 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1747.399525] env[68194]: WARNING nova.compute.manager [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Received unexpected event network-vif-plugged-f7e12679-eaad-4cab-811e-920405ca3359 for instance with vm_state building and task_state spawning. 
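[editor's note] The CreateVM_Task wait above (the oslo.service loopingcall waiting for nova.virt.vmwareapi.vm_util.create_vm to return, followed by repeated "progress is 0%" polls of task-3466927) is the fixed-interval polling pattern from oslo.service. A minimal sketch of that pattern only, assuming a hypothetical `session.get_task_info` stand-in for the real oslo.vmware session machinery:

    from oslo_service import loopingcall

    def wait_for_task(session, task_ref, interval=0.5):
        """Poll a vCenter task at a fixed interval until it finishes.

        `session.get_task_info` is a hypothetical stand-in; only the
        loopingcall usage is the point of this sketch.
        """
        def _poll():
            info = session.get_task_info(task_ref)
            if info.state == 'success':
                # Stop the loop and hand the result back to .wait().
                raise loopingcall.LoopingCallDone(info)
            if info.state == 'error':
                # Any other exception aborts the loop and propagates.
                raise RuntimeError(info.error)

        timer = loopingcall.FixedIntervalLoopingCall(_poll)
        return timer.start(interval=interval).wait()

The loop body raising LoopingCallDone is what ends the wait cleanly; a task fault surfaces as an exception from wait(), which is the behaviour visible later in this trace when CopyVirtualDisk_Task fails.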
[ 1747.399687] env[68194]: DEBUG nova.compute.manager [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Received event network-changed-f7e12679-eaad-4cab-811e-920405ca3359 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1747.399840] env[68194]: DEBUG nova.compute.manager [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Refreshing instance network info cache due to event network-changed-f7e12679-eaad-4cab-811e-920405ca3359. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1747.400037] env[68194]: DEBUG oslo_concurrency.lockutils [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] Acquiring lock "refresh_cache-bd9479c5-a9f5-47a6-b731-f0bf4633b688" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1747.400180] env[68194]: DEBUG oslo_concurrency.lockutils [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] Acquired lock "refresh_cache-bd9479c5-a9f5-47a6-b731-f0bf4633b688" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1747.400338] env[68194]: DEBUG nova.network.neutron [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Refreshing network info cache for port f7e12679-eaad-4cab-811e-920405ca3359 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1747.649819] env[68194]: DEBUG nova.network.neutron [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Updated VIF entry in instance network info cache for port f7e12679-eaad-4cab-811e-920405ca3359. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1747.650329] env[68194]: DEBUG nova.network.neutron [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Updating instance_info_cache with network_info: [{"id": "f7e12679-eaad-4cab-811e-920405ca3359", "address": "fa:16:3e:75:62:05", "network": {"id": "e47632ac-218f-49b6-baca-b148ca44ea7b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1967602305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.11", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e921c94dac1c4681afddfdf2902d672a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapf7e12679-ea", "ovs_interfaceid": "f7e12679-eaad-4cab-811e-920405ca3359", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1747.663508] env[68194]: DEBUG oslo_concurrency.lockutils [req-35b3a9d0-2150-496d-9b08-0035828b831f req-1c510956-8253-4a37-941d-d162abe3e542 service nova] Releasing lock "refresh_cache-bd9479c5-a9f5-47a6-b731-f0bf4633b688" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1747.668076] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466927, 'name': CreateVM_Task, 'duration_secs': 0.301986} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1747.668076] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1747.668482] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1747.668641] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1747.668945] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1747.669201] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-09498f94-a519-40b7-81c2-00eeee9b5bce {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1747.673747] env[68194]: DEBUG oslo_vmware.api [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Waiting for the task: (returnval){ [ 1747.673747] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]528d8d1f-a4e3-a26f-b6e2-e509c239b7e6" [ 1747.673747] env[68194]: _type = "Task" [ 1747.673747] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1747.681815] env[68194]: DEBUG oslo_vmware.api [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]528d8d1f-a4e3-a26f-b6e2-e509c239b7e6, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1748.184332] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1748.184695] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1748.184813] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1779.812235] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1779.812542] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1783.416792] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1783.417259] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1783.417391] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 1783.428457] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] There are 0 instances to clean {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 1783.428676] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1785.435217] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1786.417218] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1786.417465] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1787.411778] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1787.434840] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.416377] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1788.429750] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1788.429986] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1788.430175] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1788.430332] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1788.431445] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-28f9db12-d59d-4d3b-9220-5aa54eb93ff5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.439932] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-46229ddb-bdb9-449a-b3ec-10e7810dc37f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.453292] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-762a30a3-f445-41fc-8bd9-ff3d1bdf8d5e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.459233] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4db3bd9-44e7-4833-bd79-4f6cdd7d7ac3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.488428] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180961MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1788.488529] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1788.488709] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1788.685043] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 
'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.685223] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bf9766c7-1495-4edd-92bd-06a0d036855e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.685355] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.685480] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.685602] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.685722] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.685844] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.686016] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.686163] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4bcfda9d-e14b-441c-aebb-498dbc10513e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.686285] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1788.697161] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1788.707375] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1788.707592] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1788.707736] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1788.722901] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing inventories for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 1788.736288] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating ProviderTree inventory for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 1788.736462] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating inventory in ProviderTree for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 
'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 1788.753905] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing aggregate associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, aggregates: None {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 1788.770509] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing trait associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 1788.897614] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-71a46be7-9e83-4019-b02d-265a8e104bee {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.905315] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14871708-bfda-431e-8d04-f790d8c10ef3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.934562] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2be2daba-a844-4190-aac3-586bbc1b3674 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.942059] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ae3dd2e-42ff-48e6-9d5c-165d706cab06 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1788.954994] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1788.963397] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1788.976403] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1788.976592] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.488s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1789.972129] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.972489] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1789.972577] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1789.972635] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1789.992251] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.992391] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.992518] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.992645] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.992768] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.992887] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.993012] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.993139] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.993255] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.993370] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1789.993489] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1793.636571] env[68194]: WARNING oslo_vmware.rw_handles [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1793.636571] env[68194]: ERROR oslo_vmware.rw_handles [ 1793.637421] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/06b72d41-a381-48f7-bb82-1a4dd434862f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1793.639102] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 
5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1793.639356] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Copying Virtual Disk [datastore1] vmware_temp/06b72d41-a381-48f7-bb82-1a4dd434862f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/06b72d41-a381-48f7-bb82-1a4dd434862f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1793.639659] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-71255a7e-0a2e-4a80-b79c-8f479901f0d7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1793.647752] env[68194]: DEBUG oslo_vmware.api [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for the task: (returnval){ [ 1793.647752] env[68194]: value = "task-3466928" [ 1793.647752] env[68194]: _type = "Task" [ 1793.647752] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1793.655022] env[68194]: DEBUG oslo_vmware.api [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': task-3466928, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.157395] env[68194]: DEBUG oslo_vmware.exceptions [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1794.157663] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1794.158223] env[68194]: ERROR nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1794.158223] env[68194]: Faults: ['InvalidArgument'] [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Traceback (most recent call last): [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] yield resources [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] self.driver.spawn(context, instance, image_meta, [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] self._fetch_image_if_missing(context, vi) [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] image_cache(vi, tmp_image_ds_loc) [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] vm_util.copy_virtual_disk( [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] session._wait_for_task(vmdk_copy_task) [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", 
line 157, in _wait_for_task [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] return self.wait_for_task(task_ref) [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] return evt.wait() [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] result = hub.switch() [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] return self.greenlet.switch() [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] self.f(*self.args, **self.kw) [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] raise exceptions.translate_fault(task_info.error) [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Faults: ['InvalidArgument'] [ 1794.158223] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] [ 1794.159321] env[68194]: INFO nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Terminating instance [ 1794.160101] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1794.160313] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1794.160545] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6b64d550-2a2d-4c9f-a887-5460b65bbc94 {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.162678] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1794.162868] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1794.163569] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-86775d2f-2b54-4b87-948b-cd90adfe21e6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.170060] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1794.170238] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-43f6815d-a681-47da-9e59-e62c0c7ddd6c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.172313] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1794.172487] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1794.173400] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-45f57412-9712-40e5-a280-878dc7e8d394 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.177766] env[68194]: DEBUG oslo_vmware.api [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Waiting for the task: (returnval){ [ 1794.177766] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527f26db-3b17-4307-42eb-c348ad857119" [ 1794.177766] env[68194]: _type = "Task" [ 1794.177766] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.185259] env[68194]: DEBUG oslo_vmware.api [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527f26db-3b17-4307-42eb-c348ad857119, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.296919] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1794.297270] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1794.297545] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Deleting the datastore file [datastore1] 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1794.297877] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-80aefb44-35e8-4b8e-b40d-ad97d685c7c1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.304656] env[68194]: DEBUG oslo_vmware.api [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for the task: (returnval){ [ 1794.304656] env[68194]: value = "task-3466930" [ 1794.304656] env[68194]: _type = "Task" [ 1794.304656] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1794.312183] env[68194]: DEBUG oslo_vmware.api [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': task-3466930, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1794.687819] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1794.688252] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Creating directory with path [datastore1] vmware_temp/15c7f120-4bd7-47dc-a0dd-323504b68629/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1794.688307] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4cc11e98-4aad-4c7f-b8c5-703b19c73214 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.699173] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Created directory with path [datastore1] vmware_temp/15c7f120-4bd7-47dc-a0dd-323504b68629/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1794.699358] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Fetch image to [datastore1] vmware_temp/15c7f120-4bd7-47dc-a0dd-323504b68629/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1794.699530] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/15c7f120-4bd7-47dc-a0dd-323504b68629/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1794.700239] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9f9b3b2-54a9-4d72-b2f7-336b07b0cc51 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.706954] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-442a746f-3244-4afb-9d38-d6ce557c4300 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.715729] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cb4cc1e1-8924-4e95-8ccd-6d6aba48385f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.746066] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-720e10d4-9b90-4a72-8b18-042d3b1d6b14 {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.751160] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a6d6100c-4fe7-4cd4-a041-b8acc593528e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1794.775714] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1794.812770] env[68194]: DEBUG oslo_vmware.api [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': task-3466930, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079993} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1794.814430] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1794.814622] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1794.814798] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1794.814977] env[68194]: INFO nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Took 0.65 seconds to destroy the instance on the hypervisor. 
[ 1794.817073] env[68194]: DEBUG nova.compute.claims [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1794.817234] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1794.817468] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1794.827923] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15c7f120-4bd7-47dc-a0dd-323504b68629/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1794.887929] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1794.888062] env[68194]: DEBUG oslo_vmware.rw_handles [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/15c7f120-4bd7-47dc-a0dd-323504b68629/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1795.025922] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2de75ac5-0c08-4ac0-9565-4b76e7ff86db {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.033273] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e1140f6-0414-4a39-95ac-8d7563a02e72 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.062841] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f39d90a6-550b-4532-949d-377bab7d34cc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.071155] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-015e468e-6b75-48a8-b27d-08bbb26ab491 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.085865] env[68194]: DEBUG nova.compute.provider_tree [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1795.094060] env[68194]: DEBUG nova.scheduler.client.report [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1795.111014] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.293s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1795.111568] env[68194]: ERROR nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1795.111568] env[68194]: Faults: ['InvalidArgument'] [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Traceback (most recent call last): [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1795.111568] env[68194]: ERROR nova.compute.manager 
[instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] self.driver.spawn(context, instance, image_meta, [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] self._fetch_image_if_missing(context, vi) [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] image_cache(vi, tmp_image_ds_loc) [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] vm_util.copy_virtual_disk( [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] session._wait_for_task(vmdk_copy_task) [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] return self.wait_for_task(task_ref) [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] return evt.wait() [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] result = hub.switch() [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] return self.greenlet.switch() [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] self.f(*self.args, **self.kw) [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] raise exceptions.translate_fault(task_info.error) [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Faults: ['InvalidArgument'] [ 1795.111568] env[68194]: ERROR nova.compute.manager [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] [ 1795.112387] env[68194]: DEBUG nova.compute.utils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1795.114325] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Build of instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 was re-scheduled: A specified parameter was not correct: fileType [ 1795.114325] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1795.114521] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1795.114701] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1795.114867] env[68194]: DEBUG nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1795.115044] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1795.845965] env[68194]: DEBUG nova.network.neutron [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1795.859938] env[68194]: INFO nova.compute.manager [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Took 0.74 seconds to deallocate network for instance. [ 1795.954262] env[68194]: INFO nova.scheduler.client.report [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Deleted allocations for instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 [ 1795.981228] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1f6e8ffc-1d70-468d-b3c5-9705ec9544a5 tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 579.459s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1795.982376] env[68194]: DEBUG oslo_concurrency.lockutils [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 382.785s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1795.982600] env[68194]: DEBUG oslo_concurrency.lockutils [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1795.982819] env[68194]: DEBUG oslo_concurrency.lockutils [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1795.982972] env[68194]: DEBUG oslo_concurrency.lockutils [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1795.985293] env[68194]: INFO nova.compute.manager [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Terminating instance [ 1795.986868] env[68194]: DEBUG nova.compute.manager [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1795.987574] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1795.987795] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9e593059-3e94-4315-afdc-f36224b7ee84 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1795.997213] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50c67f35-e0c3-464b-97ec-ba8121aced45 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.013885] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1796.025301] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555 could not be found. [ 1796.025491] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1796.025666] env[68194]: INFO nova.compute.manager [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1796.025900] env[68194]: DEBUG oslo.service.loopingcall [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1796.026363] env[68194]: DEBUG nova.compute.manager [-] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1796.026458] env[68194]: DEBUG nova.network.neutron [-] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1796.052310] env[68194]: DEBUG nova.network.neutron [-] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1796.059130] env[68194]: INFO nova.compute.manager [-] [instance: 5e89b8b7-5c04-4f3e-a027-13f5c4a3a555] Took 0.03 seconds to deallocate network for instance. [ 1796.064598] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1796.064865] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1796.066269] env[68194]: INFO nova.compute.claims [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1796.215092] env[68194]: DEBUG oslo_concurrency.lockutils [None req-784b5677-4515-4fa6-8c30-04483f72eaaa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "5e89b8b7-5c04-4f3e-a027-13f5c4a3a555" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.233s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1796.281505] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a20b2069-aecd-4e90-b382-d54c92d2f133 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.291119] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-938c5fcf-8782-4feb-88fc-9b57cb2f10a4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.322417] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dad42c05-c76c-4659-b750-8e2fd3b02b89 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.328909] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d43decb0-e513-4383-bcd0-c5def6803d43 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.341618] env[68194]: DEBUG nova.compute.provider_tree [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1796.350141] env[68194]: DEBUG nova.scheduler.client.report [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1796.364817] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.300s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1796.365302] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1796.402465] env[68194]: DEBUG nova.compute.utils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1796.404035] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Allocating IP information in the background. 
{{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1796.404035] env[68194]: DEBUG nova.network.neutron [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1796.412316] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1796.415965] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1796.416807] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances with incomplete migration {{(pid=68194) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 1796.474638] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Start spawning the instance on the hypervisor. {{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1796.483275] env[68194]: DEBUG nova.policy [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a123085b0224f32ac7c2cf03b9d174d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cbb20a136093487abd1ee3965dd03518', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1796.500078] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) 
_get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1796.500359] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1796.500560] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1796.500799] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1796.500982] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1796.501181] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1796.501504] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1796.501594] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1796.501798] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1796.501972] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1796.502159] env[68194]: DEBUG nova.virt.hardware [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1796.502994] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a74bbb3e-c3ff-44c1-b4b2-9b57188a42a1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.511513] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7d8d4d19-78b1-438f-b3d1-3eb62aa9c572 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1796.967353] env[68194]: DEBUG nova.network.neutron [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Successfully created port: 9d836b07-d90b-45b5-a949-9fd57c60fc79 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1797.501658] env[68194]: DEBUG nova.compute.manager [req-d3d7c6b0-b741-4815-ab73-63b52a21da74 req-7cd50029-f6cd-4d99-949e-5b2087a0b487 service nova] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Received event network-vif-plugged-9d836b07-d90b-45b5-a949-9fd57c60fc79 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1797.501884] env[68194]: DEBUG oslo_concurrency.lockutils [req-d3d7c6b0-b741-4815-ab73-63b52a21da74 req-7cd50029-f6cd-4d99-949e-5b2087a0b487 service nova] Acquiring lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1797.502125] env[68194]: DEBUG oslo_concurrency.lockutils [req-d3d7c6b0-b741-4815-ab73-63b52a21da74 req-7cd50029-f6cd-4d99-949e-5b2087a0b487 service nova] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1797.502302] env[68194]: DEBUG oslo_concurrency.lockutils [req-d3d7c6b0-b741-4815-ab73-63b52a21da74 req-7cd50029-f6cd-4d99-949e-5b2087a0b487 service nova] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1797.502471] env[68194]: DEBUG nova.compute.manager [req-d3d7c6b0-b741-4815-ab73-63b52a21da74 req-7cd50029-f6cd-4d99-949e-5b2087a0b487 service nova] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] No waiting events found dispatching network-vif-plugged-9d836b07-d90b-45b5-a949-9fd57c60fc79 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1797.502639] env[68194]: WARNING nova.compute.manager [req-d3d7c6b0-b741-4815-ab73-63b52a21da74 req-7cd50029-f6cd-4d99-949e-5b2087a0b487 service nova] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Received unexpected event network-vif-plugged-9d836b07-d90b-45b5-a949-9fd57c60fc79 for instance with vm_state building and task_state spawning. 
[ 1797.523612] env[68194]: DEBUG nova.network.neutron [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Successfully updated port: 9d836b07-d90b-45b5-a949-9fd57c60fc79 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1797.536061] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "refresh_cache-1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1797.538274] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquired lock "refresh_cache-1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1797.538479] env[68194]: DEBUG nova.network.neutron [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1797.578902] env[68194]: DEBUG nova.network.neutron [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1797.735742] env[68194]: DEBUG nova.network.neutron [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Updating instance_info_cache with network_info: [{"id": "9d836b07-d90b-45b5-a949-9fd57c60fc79", "address": "fa:16:3e:9c:b4:e4", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d836b07-d9", "ovs_interfaceid": "9d836b07-d90b-45b5-a949-9fd57c60fc79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1797.747967] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Releasing lock "refresh_cache-1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1797.748272] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Instance network_info: |[{"id": "9d836b07-d90b-45b5-a949-9fd57c60fc79", "address": "fa:16:3e:9c:b4:e4", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d836b07-d9", "ovs_interfaceid": "9d836b07-d90b-45b5-a949-9fd57c60fc79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1797.748654] env[68194]: DEBUG 
nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:9c:b4:e4', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '9d836b07-d90b-45b5-a949-9fd57c60fc79', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1797.756121] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Creating folder: Project (cbb20a136093487abd1ee3965dd03518). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1797.756626] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-6b1aa909-20d0-44dd-ab91-3051dc7b0e53 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.766439] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Created folder: Project (cbb20a136093487abd1ee3965dd03518) in parent group-v692426. [ 1797.766584] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Creating folder: Instances. Parent ref: group-v692531. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 1797.766797] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-cbc95837-16f4-4e11-99cb-62083cf75ec1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.774750] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Created folder: Instances in parent group-v692531. [ 1797.774974] env[68194]: DEBUG oslo.service.loopingcall [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1797.775166] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1797.775357] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-cc1ecc06-981f-4d42-acf0-0b196da0b007 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1797.793135] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1797.793135] env[68194]: value = "task-3466933" [ 1797.793135] env[68194]: _type = "Task" [ 1797.793135] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1797.800272] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466933, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.303468] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466933, 'name': CreateVM_Task, 'duration_secs': 0.298333} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1798.303908] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1798.304308] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1798.304476] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1798.304791] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1798.305046] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-788ee45a-9370-401a-a1d7-b21d53c3f0a0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1798.309077] env[68194]: DEBUG oslo_vmware.api [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for the task: (returnval){ [ 1798.309077] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52140686-b8c6-0029-6064-ae5a1f56119c" [ 1798.309077] env[68194]: _type = "Task" [ 1798.309077] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1798.315902] env[68194]: DEBUG oslo_vmware.api [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52140686-b8c6-0029-6064-ae5a1f56119c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1798.819105] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1798.819105] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1798.819310] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1799.526747] env[68194]: DEBUG nova.compute.manager [req-25f2cbaa-1a7e-4e31-a36d-fe7397b427cf req-9895d273-a65e-4196-a258-d53028fc56f5 service nova] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Received event network-changed-9d836b07-d90b-45b5-a949-9fd57c60fc79 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1799.526987] env[68194]: DEBUG nova.compute.manager [req-25f2cbaa-1a7e-4e31-a36d-fe7397b427cf req-9895d273-a65e-4196-a258-d53028fc56f5 service nova] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Refreshing instance network info cache due to event network-changed-9d836b07-d90b-45b5-a949-9fd57c60fc79. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1799.527135] env[68194]: DEBUG oslo_concurrency.lockutils [req-25f2cbaa-1a7e-4e31-a36d-fe7397b427cf req-9895d273-a65e-4196-a258-d53028fc56f5 service nova] Acquiring lock "refresh_cache-1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1799.527280] env[68194]: DEBUG oslo_concurrency.lockutils [req-25f2cbaa-1a7e-4e31-a36d-fe7397b427cf req-9895d273-a65e-4196-a258-d53028fc56f5 service nova] Acquired lock "refresh_cache-1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1799.527455] env[68194]: DEBUG nova.network.neutron [req-25f2cbaa-1a7e-4e31-a36d-fe7397b427cf req-9895d273-a65e-4196-a258-d53028fc56f5 service nova] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Refreshing network info cache for port 9d836b07-d90b-45b5-a949-9fd57c60fc79 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1799.778269] env[68194]: DEBUG nova.network.neutron [req-25f2cbaa-1a7e-4e31-a36d-fe7397b427cf req-9895d273-a65e-4196-a258-d53028fc56f5 service nova] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Updated VIF entry in instance network info cache for port 9d836b07-d90b-45b5-a949-9fd57c60fc79. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1799.778803] env[68194]: DEBUG nova.network.neutron [req-25f2cbaa-1a7e-4e31-a36d-fe7397b427cf req-9895d273-a65e-4196-a258-d53028fc56f5 service nova] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Updating instance_info_cache with network_info: [{"id": "9d836b07-d90b-45b5-a949-9fd57c60fc79", "address": "fa:16:3e:9c:b4:e4", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.251", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap9d836b07-d9", "ovs_interfaceid": "9d836b07-d90b-45b5-a949-9fd57c60fc79", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1799.787902] env[68194]: DEBUG oslo_concurrency.lockutils [req-25f2cbaa-1a7e-4e31-a36d-fe7397b427cf req-9895d273-a65e-4196-a258-d53028fc56f5 service nova] Releasing lock "refresh_cache-1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1807.995474] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "4bcfda9d-e14b-441c-aebb-498dbc10513e" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.427101] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_power_states {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1831.450581] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Getting list of instances from cluster (obj){ [ 1831.450581] env[68194]: value = "domain-c8" [ 1831.450581] env[68194]: _type = "ClusterComputeResource" [ 1831.450581] env[68194]: } {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 1831.451839] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-81566ff6-7294-4cd7-96d5-d6cbefdb5968 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1831.468935] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Got total of 10 instances {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}} [ 1831.469118] env[68194]: DEBUG 
nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid bf9766c7-1495-4edd-92bd-06a0d036855e {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.469313] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 7b430b72-05fa-49a6-8bbb-7c083cb96457 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.469473] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 3da3b410-889a-42c5-9603-f92f689ab5b5 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.469630] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 95be4f59-e835-4389-93ae-9814e97f8ef4 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.469776] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 73abf0ba-016c-4536-afd3-f6c6960045fc {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.469925] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 2b833505-f170-46ea-8d14-c449f88a7d4c {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.470093] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid d5bc98e3-9621-41bb-90a3-2f8e80c6928b {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.470247] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 4bcfda9d-e14b-441c-aebb-498dbc10513e {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.470393] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid bd9479c5-a9f5-47a6-b731-f0bf4633b688 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.470538] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Triggering sync for uuid 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 {{(pid=68194) _sync_power_states /opt/stack/nova/nova/compute/manager.py:10321}} [ 1831.470856] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "bf9766c7-1495-4edd-92bd-06a0d036855e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.471098] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "7b430b72-05fa-49a6-8bbb-7c083cb96457" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.471376] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "3da3b410-889a-42c5-9603-f92f689ab5b5" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" 
{{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.471650] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "95be4f59-e835-4389-93ae-9814e97f8ef4" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.471825] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "73abf0ba-016c-4536-afd3-f6c6960045fc" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.472038] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "2b833505-f170-46ea-8d14-c449f88a7d4c" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.472245] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.472438] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "4bcfda9d-e14b-441c-aebb-498dbc10513e" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.472630] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1831.472816] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1834.820047] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1834.820390] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d" acquired by 
"nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1840.749695] env[68194]: WARNING oslo_vmware.rw_handles [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1840.749695] env[68194]: ERROR oslo_vmware.rw_handles [ 1840.750670] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/15c7f120-4bd7-47dc-a0dd-323504b68629/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1840.752202] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1840.752463] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Copying Virtual Disk [datastore1] vmware_temp/15c7f120-4bd7-47dc-a0dd-323504b68629/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/15c7f120-4bd7-47dc-a0dd-323504b68629/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1840.752751] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-c9d277b1-6a75-4784-a278-25fc4382b2c9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1840.760452] env[68194]: DEBUG oslo_vmware.api [None req-e908384e-68fb-4754-9306-41cc8fc116aa 
tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Waiting for the task: (returnval){ [ 1840.760452] env[68194]: value = "task-3466934" [ 1840.760452] env[68194]: _type = "Task" [ 1840.760452] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1840.768654] env[68194]: DEBUG oslo_vmware.api [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Task: {'id': task-3466934, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.270913] env[68194]: DEBUG oslo_vmware.exceptions [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1841.271154] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1841.271687] env[68194]: ERROR nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1841.271687] env[68194]: Faults: ['InvalidArgument'] [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Traceback (most recent call last): [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] yield resources [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self.driver.spawn(context, instance, image_meta, [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._fetch_image_if_missing(context, vi) [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] image_cache(vi, tmp_image_ds_loc) [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] vm_util.copy_virtual_disk( [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] session._wait_for_task(vmdk_copy_task) [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.wait_for_task(task_ref) [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return evt.wait() [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] result = hub.switch() [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.greenlet.switch() [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self.f(*self.args, **self.kw) [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] raise exceptions.translate_fault(task_info.error) [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Faults: ['InvalidArgument'] [ 1841.271687] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1841.272781] env[68194]: INFO nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Terminating instance [ 1841.273528] env[68194]: DEBUG 
oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1841.273734] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1841.273966] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-20d01c41-2a07-4aa1-bd26-6f43f8aa353a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.276079] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1841.276279] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1841.276993] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61340bd1-6fad-4e49-9aa0-64c47108d1c8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.283678] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1841.283888] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-020392cb-ae67-4811-af96-e6801f64811a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.285965] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1841.286153] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1841.287085] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-6eafa203-e218-44a5-921b-96b57b4303ba {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.291489] env[68194]: DEBUG oslo_vmware.api [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Waiting for the task: (returnval){ [ 1841.291489] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]522302db-cbb9-d80e-bd9f-a5a0ddcc7aed" [ 1841.291489] env[68194]: _type = "Task" [ 1841.291489] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.299894] env[68194]: DEBUG oslo_vmware.api [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]522302db-cbb9-d80e-bd9f-a5a0ddcc7aed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.351948] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1841.352187] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1841.352375] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Deleting the datastore file [datastore1] bf9766c7-1495-4edd-92bd-06a0d036855e {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1841.352646] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-17ccfc7d-3d72-414e-9a22-3cfdeafe21af {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.358544] env[68194]: DEBUG oslo_vmware.api [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Waiting for the task: (returnval){ [ 1841.358544] env[68194]: value = "task-3466936" [ 1841.358544] env[68194]: _type = "Task" [ 1841.358544] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1841.367108] env[68194]: DEBUG oslo_vmware.api [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Task: {'id': task-3466936, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1841.415727] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1841.415919] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1841.801757] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1841.802132] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Creating directory with path [datastore1] vmware_temp/143941ab-5a94-46bf-babd-feaf5c694a49/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1841.802234] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-e56f1a76-c7d0-4042-a2e3-eda703cb8434 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.814062] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Created directory with path [datastore1] vmware_temp/143941ab-5a94-46bf-babd-feaf5c694a49/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1841.814314] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Fetch image to [datastore1] vmware_temp/143941ab-5a94-46bf-babd-feaf5c694a49/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1841.814515] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/143941ab-5a94-46bf-babd-feaf5c694a49/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1841.815265] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdf4caf4-c7f8-4935-a893-9963d1be3ce4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.821368] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d91624af-9e1c-4fc0-91ad-f2b067097bfc 
{{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.830021] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f22fe358-8c92-4d49-98da-e4356f1fbe74 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.859199] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-59e36c68-b020-41ed-8bf5-54ebd44f9d6d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.869191] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a8efdd70-7a4a-43e0-83d9-89696eebc6b4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1841.870791] env[68194]: DEBUG oslo_vmware.api [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Task: {'id': task-3466936, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075068} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1841.871025] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1841.871222] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1841.871387] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1841.871560] env[68194]: INFO nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1841.873593] env[68194]: DEBUG nova.compute.claims [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1841.873768] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1841.873977] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1841.890867] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1842.047467] env[68194]: DEBUG oslo_vmware.rw_handles [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/143941ab-5a94-46bf-babd-feaf5c694a49/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1842.106709] env[68194]: DEBUG oslo_vmware.rw_handles [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1842.106895] env[68194]: DEBUG oslo_vmware.rw_handles [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/143941ab-5a94-46bf-babd-feaf5c694a49/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1842.115803] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ea22f4f8-ebc2-4754-ae3d-b71f8a31418f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.123851] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-34aa68ee-7117-4b4f-8a13-531b7bcca9a2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.153985] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-daa389bb-8ffd-4189-9f82-24f60595c059 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.160936] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25c089f-374c-4536-86a9-99ca811e0199 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.173481] env[68194]: DEBUG nova.compute.provider_tree [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.182855] env[68194]: DEBUG nova.scheduler.client.report [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1842.196033] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.322s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1842.196540] env[68194]: ERROR nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1842.196540] env[68194]: Faults: ['InvalidArgument'] [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Traceback (most recent call last): [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1842.196540] env[68194]: ERROR 
nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self.driver.spawn(context, instance, image_meta, [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._fetch_image_if_missing(context, vi) [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] image_cache(vi, tmp_image_ds_loc) [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] vm_util.copy_virtual_disk( [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] session._wait_for_task(vmdk_copy_task) [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.wait_for_task(task_ref) [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return evt.wait() [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] result = hub.switch() [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.greenlet.switch() [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self.f(*self.args, **self.kw) [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] raise exceptions.translate_fault(task_info.error) [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Faults: ['InvalidArgument'] [ 1842.196540] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.197527] env[68194]: DEBUG nova.compute.utils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1842.198981] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Build of instance bf9766c7-1495-4edd-92bd-06a0d036855e was re-scheduled: A specified parameter was not correct: fileType [ 1842.198981] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1842.199127] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1842.199268] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1842.199487] env[68194]: DEBUG nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1842.199666] env[68194]: DEBUG nova.network.neutron [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1842.297944] env[68194]: DEBUG neutronclient.v2_0.client [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1842.298994] env[68194]: ERROR nova.compute.manager [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Traceback (most recent call last): [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self.driver.spawn(context, instance, image_meta, [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._fetch_image_if_missing(context, vi) [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] image_cache(vi, tmp_image_ds_loc) [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] vm_util.copy_virtual_disk( [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in 
copy_virtual_disk [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] session._wait_for_task(vmdk_copy_task) [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.wait_for_task(task_ref) [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return evt.wait() [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] result = hub.switch() [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.greenlet.switch() [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self.f(*self.args, **self.kw) [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] raise exceptions.translate_fault(task_info.error) [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Faults: ['InvalidArgument'] [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] During handling of the above exception, another exception occurred: [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Traceback (most recent call last): [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._build_and_run_instance(context, instance, image, [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 
1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] raise exception.RescheduledException( [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] nova.exception.RescheduledException: Build of instance bf9766c7-1495-4edd-92bd-06a0d036855e was re-scheduled: A specified parameter was not correct: fileType [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Faults: ['InvalidArgument'] [ 1842.298994] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] During handling of the above exception, another exception occurred: [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Traceback (most recent call last): [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] exception_handler_v20(status_code, error_body) [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] raise client_exc(message=error_message, [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Neutron server returns request_ids: ['req-25c68710-b2b2-43b6-b696-127ed1f2bfae'] [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] During handling of the above exception, another exception occurred: [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Traceback (most recent call last): [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._deallocate_network(context, instance, requested_networks) [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File 
"/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self.network_api.deallocate_for_instance( [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] data = neutron.list_ports(**search_opts) [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.list('ports', self.ports_path, retrieve_all, [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] for r in self._pagination(collection, path, **params): [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] res = self.get(path, params=params) [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.retry_request("GET", action, body=body, [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1842.300200] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return 
self.do_request(method, action, body=body, [ 1842.301385] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.301385] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.301385] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1842.301385] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._handle_fault_response(status_code, replybody, resp) [ 1842.301385] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 1842.301385] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] raise exception.Unauthorized() [ 1842.301385] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] nova.exception.Unauthorized: Not authorized. [ 1842.301385] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.376486] env[68194]: INFO nova.scheduler.client.report [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Deleted allocations for instance bf9766c7-1495-4edd-92bd-06a0d036855e [ 1842.396585] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e908384e-68fb-4754-9306-41cc8fc116aa tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "bf9766c7-1495-4edd-92bd-06a0d036855e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 616.676s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1842.397669] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "bf9766c7-1495-4edd-92bd-06a0d036855e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 420.744s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1842.397885] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Acquiring lock "bf9766c7-1495-4edd-92bd-06a0d036855e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1842.398136] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "bf9766c7-1495-4edd-92bd-06a0d036855e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1842.398379] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 
tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "bf9766c7-1495-4edd-92bd-06a0d036855e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1842.400143] env[68194]: INFO nova.compute.manager [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Terminating instance [ 1842.401842] env[68194]: DEBUG nova.compute.manager [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1842.402050] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1842.402518] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6d209ffc-55cf-44a1-8a0f-ce803d058eaa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.406259] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1842.412624] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2655677-6d93-49d8-880b-b9baee4f0ea1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.441922] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bf9766c7-1495-4edd-92bd-06a0d036855e could not be found. [ 1842.442139] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1842.442317] env[68194]: INFO nova.compute.manager [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Took 0.04 seconds to destroy the instance on the hypervisor. 
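Note: the paired "acquired by ... :: waited 420.744s" / "released by ... :: held 616.676s" messages above come from oslo_concurrency's lock decorator; terminate_instance for bf9766c7-1495-4edd-92bd-06a0d036855e had to queue behind the still-running build that held the same per-instance lock. A minimal sketch of that serialization pattern, with illustrative function names rather than Nova's actual code:

# Hedged sketch of the lock pattern behind the "Acquiring lock ... /
# acquired by ... / released by ..." DEBUG lines. The functions below are
# illustrative, not Nova's implementation; only the decorator is real.
from oslo_concurrency import lockutils

INSTANCE_UUID = 'bf9766c7-1495-4edd-92bd-06a0d036855e'

@lockutils.synchronized(INSTANCE_UUID)
def do_build_and_run_instance():
    # long-running build; holds the per-instance lock while it runs
    ...

@lockutils.synchronized(INSTANCE_UUID)
def do_terminate_instance():
    # queues behind the build above; the log records how long it waited
    ...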
[ 1842.442553] env[68194]: DEBUG oslo.service.loopingcall [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1842.444742] env[68194]: DEBUG nova.compute.manager [-] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1842.444846] env[68194]: DEBUG nova.network.neutron [-] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1842.458121] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1842.458365] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1842.459762] env[68194]: INFO nova.compute.claims [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1842.541637] env[68194]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1842.541945] env[68194]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1842.542677] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-b77243dc-9b18-41c9-b69c-01eba10ff8f1'] [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall result = f(*args, **kwargs) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1842.542677] env[68194]: ERROR 
oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1842.542677] env[68194]: ERROR oslo.service.loopingcall [ 1842.544036] env[68194]: ERROR nova.compute.manager [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1842.571437] env[68194]: ERROR nova.compute.manager [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Traceback (most recent call last): [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] exception_handler_v20(status_code, error_body) [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] raise client_exc(message=error_message, [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Neutron server returns request_ids: ['req-b77243dc-9b18-41c9-b69c-01eba10ff8f1'] [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] During handling of the above exception, another exception occurred: [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Traceback (most recent call last): [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._delete_instance(context, instance, bdms) [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._shutdown_instance(context, instance, bdms) [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._try_deallocate_network(context, instance, requested_networks) [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] with excutils.save_and_reraise_exception(): [ 1842.571437] env[68194]: ERROR 
nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self.force_reraise() [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] raise self.value [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] _deallocate_network_with_retries() [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return evt.wait() [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] result = hub.switch() [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.greenlet.switch() [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] result = func(*self.args, **self.kw) [ 1842.571437] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] result = f(*args, **kwargs) [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._deallocate_network( [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self.network_api.deallocate_for_instance( [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: 
bf9766c7-1495-4edd-92bd-06a0d036855e] data = neutron.list_ports(**search_opts) [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.list('ports', self.ports_path, retrieve_all, [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] for r in self._pagination(collection, path, **params): [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] res = self.get(path, params=params) [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.retry_request("GET", action, body=body, [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] return self.do_request(method, action, body=body, [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] ret = obj(*args, **kwargs) [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] self._handle_fault_response(status_code, replybody, resp) [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1842.572699] env[68194]: ERROR nova.compute.manager [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] [ 1842.599613] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Lock "bf9766c7-1495-4edd-92bd-06a0d036855e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.202s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1842.600705] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "bf9766c7-1495-4edd-92bd-06a0d036855e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 11.130s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1842.600898] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] During sync_power_state the instance has a pending task (deleting). Skip. [ 1842.601089] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "bf9766c7-1495-4edd-92bd-06a0d036855e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1842.638354] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f673dc2-4637-4918-aa54-4a98308646b8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.646695] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d520b8d-9464-4877-afa8-31dace497eca {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.651822] env[68194]: INFO nova.compute.manager [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] [instance: bf9766c7-1495-4edd-92bd-06a0d036855e] Successfully reverted task state from None on failure for instance. 
[ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server [None req-9b947bf3-5277-4622-b27b-4be67ba0d5d2 tempest-TenantUsagesTestJSON-1285342793 tempest-TenantUsagesTestJSON-1285342793-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-b77243dc-9b18-41c9-b69c-01eba10ff8f1'] [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server return f(*args, **kwargs) [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1842.655546] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server 
raise self.value [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1842.657671] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1842.659243] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1842.659243] env[68194]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1842.659243] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1842.659243] env[68194]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1842.659243] env[68194]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. 
[ 1842.659243] env[68194]: ERROR oslo_messaging.rpc.server [ 1842.680075] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0c43c456-a665-4612-ac78-ae5f1139a535 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.687211] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d7bb7c7-6273-4eaf-8c6a-5cae209469eb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.700527] env[68194]: DEBUG nova.compute.provider_tree [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1842.708727] env[68194]: DEBUG nova.scheduler.client.report [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1842.721256] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.263s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1842.721720] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1842.754979] env[68194]: DEBUG nova.compute.utils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1842.756217] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Allocating IP information in the background. 
{{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1842.756402] env[68194]: DEBUG nova.network.neutron [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1842.768673] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1842.830280] env[68194]: DEBUG nova.policy [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '6a123085b0224f32ac7c2cf03b9d174d', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'cbb20a136093487abd1ee3965dd03518', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1842.833313] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1842.856996] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1842.857319] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1842.857482] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1842.857661] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1842.857835] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1842.857953] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1842.858209] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1842.858376] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1842.858547] env[68194]: DEBUG 
nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1842.858711] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1842.858884] env[68194]: DEBUG nova.virt.hardware [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1842.859843] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16386587-1dda-40d0-80c9-e4f30a87b50d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1842.871569] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-792d8441-e6f7-407d-b078-aaec3728c9f6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.115943] env[68194]: DEBUG nova.network.neutron [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Successfully created port: b65ba33d-8521-4d73-904a-dbb2de673c25 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1843.611178] env[68194]: DEBUG nova.compute.manager [req-b078ee96-d428-4f49-ba8d-30d248e17106 req-6073761f-24d2-4202-a1fd-14b69bb5be5c service nova] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Received event network-vif-plugged-b65ba33d-8521-4d73-904a-dbb2de673c25 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1843.611178] env[68194]: DEBUG oslo_concurrency.lockutils [req-b078ee96-d428-4f49-ba8d-30d248e17106 req-6073761f-24d2-4202-a1fd-14b69bb5be5c service nova] Acquiring lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1843.611178] env[68194]: DEBUG oslo_concurrency.lockutils [req-b078ee96-d428-4f49-ba8d-30d248e17106 req-6073761f-24d2-4202-a1fd-14b69bb5be5c service nova] Lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1843.611178] env[68194]: DEBUG oslo_concurrency.lockutils [req-b078ee96-d428-4f49-ba8d-30d248e17106 req-6073761f-24d2-4202-a1fd-14b69bb5be5c service nova] Lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1843.611178] env[68194]: DEBUG 
nova.compute.manager [req-b078ee96-d428-4f49-ba8d-30d248e17106 req-6073761f-24d2-4202-a1fd-14b69bb5be5c service nova] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] No waiting events found dispatching network-vif-plugged-b65ba33d-8521-4d73-904a-dbb2de673c25 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1843.611178] env[68194]: WARNING nova.compute.manager [req-b078ee96-d428-4f49-ba8d-30d248e17106 req-6073761f-24d2-4202-a1fd-14b69bb5be5c service nova] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Received unexpected event network-vif-plugged-b65ba33d-8521-4d73-904a-dbb2de673c25 for instance with vm_state building and task_state spawning. [ 1843.683099] env[68194]: DEBUG nova.network.neutron [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Successfully updated port: b65ba33d-8521-4d73-904a-dbb2de673c25 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1843.694644] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "refresh_cache-7142c793-cb3a-4bb0-87b6-c7fd5547f252" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1843.694803] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquired lock "refresh_cache-7142c793-cb3a-4bb0-87b6-c7fd5547f252" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1843.694953] env[68194]: DEBUG nova.network.neutron [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1843.732652] env[68194]: DEBUG nova.network.neutron [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1843.893143] env[68194]: DEBUG nova.network.neutron [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Updating instance_info_cache with network_info: [{"id": "b65ba33d-8521-4d73-904a-dbb2de673c25", "address": "fa:16:3e:6c:57:51", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb65ba33d-85", "ovs_interfaceid": "b65ba33d-8521-4d73-904a-dbb2de673c25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1843.905084] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Releasing lock "refresh_cache-7142c793-cb3a-4bb0-87b6-c7fd5547f252" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1843.905380] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Instance network_info: |[{"id": "b65ba33d-8521-4d73-904a-dbb2de673c25", "address": "fa:16:3e:6c:57:51", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb65ba33d-85", "ovs_interfaceid": "b65ba33d-8521-4d73-904a-dbb2de673c25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1843.905786] env[68194]: DEBUG 
nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:6c:57:51', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '2ff90ec9-3c7e-4e76-b409-fcf37fc588d8', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': 'b65ba33d-8521-4d73-904a-dbb2de673c25', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1843.913194] env[68194]: DEBUG oslo.service.loopingcall [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1843.913646] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1843.913874] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-faa2be8c-eec0-4a30-b2a9-9126c5796b9e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1843.933335] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1843.933335] env[68194]: value = "task-3466937" [ 1843.933335] env[68194]: _type = "Task" [ 1843.933335] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1843.941101] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466937, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.443791] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466937, 'name': CreateVM_Task, 'duration_secs': 0.321384} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1844.444019] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1844.444645] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1844.444812] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1844.445165] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1844.445422] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-7eafba6d-77c8-4549-9b77-8503775c5cdb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1844.449892] env[68194]: DEBUG oslo_vmware.api [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for the task: (returnval){ [ 1844.449892] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]520d2d2e-173d-6f44-ef8c-cb12840524b2" [ 1844.449892] env[68194]: _type = "Task" [ 1844.449892] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1844.457459] env[68194]: DEBUG oslo_vmware.api [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]520d2d2e-173d-6f44-ef8c-cb12840524b2, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1844.960459] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1844.960745] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1844.960941] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1845.417089] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1845.721913] env[68194]: DEBUG nova.compute.manager [req-d86f6445-f898-4b3d-b049-815881ad3bec req-d70dcb75-8729-49f2-b396-1382cd6dbf62 service nova] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Received event network-changed-b65ba33d-8521-4d73-904a-dbb2de673c25 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1845.721913] env[68194]: DEBUG nova.compute.manager [req-d86f6445-f898-4b3d-b049-815881ad3bec req-d70dcb75-8729-49f2-b396-1382cd6dbf62 service nova] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Refreshing instance network info cache due to event network-changed-b65ba33d-8521-4d73-904a-dbb2de673c25. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1845.721913] env[68194]: DEBUG oslo_concurrency.lockutils [req-d86f6445-f898-4b3d-b049-815881ad3bec req-d70dcb75-8729-49f2-b396-1382cd6dbf62 service nova] Acquiring lock "refresh_cache-7142c793-cb3a-4bb0-87b6-c7fd5547f252" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1845.722057] env[68194]: DEBUG oslo_concurrency.lockutils [req-d86f6445-f898-4b3d-b049-815881ad3bec req-d70dcb75-8729-49f2-b396-1382cd6dbf62 service nova] Acquired lock "refresh_cache-7142c793-cb3a-4bb0-87b6-c7fd5547f252" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1845.722134] env[68194]: DEBUG nova.network.neutron [req-d86f6445-f898-4b3d-b049-815881ad3bec req-d70dcb75-8729-49f2-b396-1382cd6dbf62 service nova] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Refreshing network info cache for port b65ba33d-8521-4d73-904a-dbb2de673c25 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1845.953468] env[68194]: DEBUG nova.network.neutron [req-d86f6445-f898-4b3d-b049-815881ad3bec req-d70dcb75-8729-49f2-b396-1382cd6dbf62 service nova] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Updated VIF entry in instance network info cache for port b65ba33d-8521-4d73-904a-dbb2de673c25. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1845.953820] env[68194]: DEBUG nova.network.neutron [req-d86f6445-f898-4b3d-b049-815881ad3bec req-d70dcb75-8729-49f2-b396-1382cd6dbf62 service nova] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Updating instance_info_cache with network_info: [{"id": "b65ba33d-8521-4d73-904a-dbb2de673c25", "address": "fa:16:3e:6c:57:51", "network": {"id": "cbdaa0f2-8d96-4126-a9c9-4b112484abc1", "bridge": "br-int", "label": "shared", "subnets": [{"cidr": "192.168.233.0/24", "dns": [], "gateway": {"address": "192.168.233.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.233.186", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.233.2"}}], "meta": {"injected": false, "tenant_id": "02796dfa696c46f98aba9ec6c16fb9fb", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "2ff90ec9-3c7e-4e76-b409-fcf37fc588d8", "external-id": "nsx-vlan-transportzone-475", "segmentation_id": 475, "bound_drivers": {"0": "nsxv3"}}, "devname": "tapb65ba33d-85", "ovs_interfaceid": "b65ba33d-8521-4d73-904a-dbb2de673c25", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1845.962882] env[68194]: DEBUG oslo_concurrency.lockutils [req-d86f6445-f898-4b3d-b049-815881ad3bec req-d70dcb75-8729-49f2-b396-1382cd6dbf62 service nova] Releasing lock "refresh_cache-7142c793-cb3a-4bb0-87b6-c7fd5547f252" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1846.416527] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1846.416780] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1847.416271] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1847.416579] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.416747] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.416958] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1849.416990] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1849.438308] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.438477] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.438613] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.438742] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.438868] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.439208] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.439208] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.439384] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.439384] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.439511] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1849.439581] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1849.440114] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1849.450785] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1849.451016] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1849.451195] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1849.451349] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1849.452460] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05baf40b-16b1-441b-8c42-63c5e2424b82 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.461378] 
env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-94b2e755-ca82-4ddf-95c1-71ba87f0744c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.475561] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9c7790a7-040d-4e38-8c36-be125cfa1aaa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.482444] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff04690f-2a36-4dc2-a552-dde6e43420a8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.511780] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180957MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1849.511780] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1849.511780] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1849.583190] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.583353] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.583483] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.583609] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.583732] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.583850] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.583967] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4bcfda9d-e14b-441c-aebb-498dbc10513e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.584099] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.584220] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.584336] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1849.595631] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1849.608260] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1849.608495] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1849.608645] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1849.639916] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "5fb0537f-884d-421c-9f47-ec8fd7236e54" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1849.640164] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "5fb0537f-884d-421c-9f47-ec8fd7236e54" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1849.754425] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ce8c8801-4253-4191-bd71-8fb1540d229a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.762161] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-303990a9-3483-4b58-af33-39378d03b74c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.791760] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7264e5d7-39c5-4a4b-b579-c013ac6c773e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.798592] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b75a1e19-a4d1-43d1-9a9c-6805a010a4d2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1849.811160] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1849.819787] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 
'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1849.833865] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1849.834065] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.323s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1850.828584] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1863.098111] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1890.766049] env[68194]: WARNING oslo_vmware.rw_handles [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1890.766049] env[68194]: ERROR oslo_vmware.rw_handles [ 1890.766803] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to 
vmware_temp/143941ab-5a94-46bf-babd-feaf5c694a49/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1890.768593] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1890.768848] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Copying Virtual Disk [datastore1] vmware_temp/143941ab-5a94-46bf-babd-feaf5c694a49/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/143941ab-5a94-46bf-babd-feaf5c694a49/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1890.769158] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-3046c558-0da5-424f-828a-b2e3b6fd74c0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1890.777079] env[68194]: DEBUG oslo_vmware.api [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Waiting for the task: (returnval){ [ 1890.777079] env[68194]: value = "task-3466938" [ 1890.777079] env[68194]: _type = "Task" [ 1890.777079] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1890.784724] env[68194]: DEBUG oslo_vmware.api [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Task: {'id': task-3466938, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.287758] env[68194]: DEBUG oslo_vmware.exceptions [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1891.288024] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1891.288619] env[68194]: ERROR nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1891.288619] env[68194]: Faults: ['InvalidArgument'] [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Traceback (most recent call last): [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] yield resources [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] self.driver.spawn(context, instance, image_meta, [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] self._fetch_image_if_missing(context, vi) [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] image_cache(vi, tmp_image_ds_loc) [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] vm_util.copy_virtual_disk( [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] session._wait_for_task(vmdk_copy_task) [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] return self.wait_for_task(task_ref) [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] return evt.wait() [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] result = hub.switch() [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] return self.greenlet.switch() [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] self.f(*self.args, **self.kw) [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] raise exceptions.translate_fault(task_info.error) [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Faults: ['InvalidArgument'] [ 1891.288619] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] [ 1891.289750] env[68194]: INFO nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Terminating instance [ 1891.290698] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1891.290926] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1891.291182] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-2cede045-2f76-41a2-8bd9-cbbfc122a836 
{{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.293309] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1891.293503] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1891.294238] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dfe89963-192a-4b84-91d0-20c946756c7b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.301243] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1891.302160] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-fc202d7e-d1ab-4860-aebe-8a213fb338dc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.303474] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1891.303652] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1891.304538] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-8a5b9066-e46e-4963-977f-26152ba18c6f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.309705] env[68194]: DEBUG oslo_vmware.api [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Waiting for the task: (returnval){ [ 1891.309705] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52074783-e251-74fb-1926-e6f964ab7a1f" [ 1891.309705] env[68194]: _type = "Task" [ 1891.309705] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.319142] env[68194]: DEBUG oslo_vmware.api [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52074783-e251-74fb-1926-e6f964ab7a1f, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.371157] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1891.371369] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1891.371556] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Deleting the datastore file [datastore1] 7b430b72-05fa-49a6-8bbb-7c083cb96457 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1891.371819] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8c3ece5a-d0b0-4a9e-b2a2-0896c4f88151 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.377550] env[68194]: DEBUG oslo_vmware.api [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Waiting for the task: (returnval){ [ 1891.377550] env[68194]: value = "task-3466940" [ 1891.377550] env[68194]: _type = "Task" [ 1891.377550] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1891.384801] env[68194]: DEBUG oslo_vmware.api [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Task: {'id': task-3466940, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1891.820043] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1891.820043] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Creating directory with path [datastore1] vmware_temp/731c2132-8311-443a-bb63-9e1d9365eef2/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1891.820480] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-74b61f91-6a91-4e46-bfaa-121a784dd58d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.831422] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Created directory with path [datastore1] vmware_temp/731c2132-8311-443a-bb63-9e1d9365eef2/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1891.831589] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Fetch image to [datastore1] vmware_temp/731c2132-8311-443a-bb63-9e1d9365eef2/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1891.831712] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/731c2132-8311-443a-bb63-9e1d9365eef2/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1891.832413] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bd778304-f3d4-49c6-9e4b-10fdc0c8eb99 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.838512] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ff5baf91-1ac9-4721-ab45-c293030a83d0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.847146] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4a34aa90-33ef-48e5-82b8-17546633fdde {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.877656] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-53d6db35-5211-4157-8bb1-5ce4fbcbff9b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.888400] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-9167f902-bec9-48ee-8847-224a1fe71be6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1891.889997] env[68194]: DEBUG oslo_vmware.api [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Task: {'id': task-3466940, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.075035} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1891.890238] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1891.890418] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1891.890584] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1891.890752] env[68194]: INFO nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Took 0.60 seconds to destroy the instance on the hypervisor. 
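[editor's note] The entries above all follow the same vCenter task life-cycle: a method such as CreateVM_Task, CopyVirtualDisk_Task, UnregisterVM or DeleteDatastoreFile_Task returns a task reference, and the driver blocks on it via session._wait_for_task(), which (as the traceback shows) goes through oslo_vmware.api.wait_for_task/_poll_task and surfaces as the repeated "Waiting for the task ... to complete" / "progress is 0%" / "completed successfully" lines. The sketch below is only a simplified illustration of that polling pattern, not the oslo.vmware implementation: TaskInfo is a stand-in for the vSphere TaskInfo object and read_task_info is a hypothetical callable that would normally query vCenter.

```python
import time
from dataclasses import dataclass
from typing import Callable, Optional


@dataclass
class TaskInfo:
    """Minimal stand-in for the vSphere TaskInfo object."""
    state: str                    # 'queued' | 'running' | 'success' | 'error'
    progress: int = 0
    error: Optional[str] = None


class TaskFailed(Exception):
    """Raised when the task ends in the 'error' state, analogous to the
    VimFaultException (Faults: ['InvalidArgument']) in the traceback above."""


def wait_for_task(read_task_info: Callable[[], TaskInfo],
                  poll_interval: float = 0.5) -> TaskInfo:
    """Poll a task until it finishes, mirroring the log's poll/complete pairs."""
    while True:
        info = read_task_info()          # hypothetical vCenter property read
        if info.state == 'success':
            return info                  # "completed successfully." in the log
        if info.state == 'error':
            raise TaskFailed(info.error)  # translated fault, e.g. InvalidArgument
        time.sleep(poll_interval)        # still queued/running: "progress is N%"
```

Read this way, the DeleteDatastoreFile_Task above finished within a single poll cycle ('duration_secs': 0.075035), while the earlier CopyVirtualDisk_Task took the error branch and propagated the "A specified parameter was not correct: fileType" fault back into _build_and_run_instance.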
[ 1891.892864] env[68194]: DEBUG nova.compute.claims [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1891.893067] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1891.893296] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1891.910342] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1892.043893] env[68194]: DEBUG oslo_vmware.rw_handles [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/731c2132-8311-443a-bb63-9e1d9365eef2/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1892.103791] env[68194]: DEBUG oslo_vmware.rw_handles [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1892.103996] env[68194]: DEBUG oslo_vmware.rw_handles [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/731c2132-8311-443a-bb63-9e1d9365eef2/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1892.136659] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b25dfbcb-a1eb-4a22-80bb-e66a1b078132 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.144407] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aaa23053-ce41-4770-aecf-1214a6aaa390 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.172875] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5692001-a91b-4ddb-b890-e9ace3a7c2d6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.179773] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e66e0e3-f197-45a1-87d8-3e66f46d6198 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.193455] env[68194]: DEBUG nova.compute.provider_tree [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1892.203345] env[68194]: DEBUG nova.scheduler.client.report [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1892.217491] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.324s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1892.217901] env[68194]: ERROR nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1892.217901] env[68194]: Faults: ['InvalidArgument'] [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Traceback (most recent call last): [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1892.217901] env[68194]: ERROR 
nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] self.driver.spawn(context, instance, image_meta, [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] self._fetch_image_if_missing(context, vi) [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] image_cache(vi, tmp_image_ds_loc) [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] vm_util.copy_virtual_disk( [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] session._wait_for_task(vmdk_copy_task) [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] return self.wait_for_task(task_ref) [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] return evt.wait() [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] result = hub.switch() [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] return self.greenlet.switch() [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] self.f(*self.args, **self.kw) [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] raise exceptions.translate_fault(task_info.error) [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Faults: ['InvalidArgument'] [ 1892.217901] env[68194]: ERROR nova.compute.manager [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] [ 1892.218884] env[68194]: DEBUG nova.compute.utils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1892.220015] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Build of instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 was re-scheduled: A specified parameter was not correct: fileType [ 1892.220015] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1892.220402] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1892.220582] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1892.220752] env[68194]: DEBUG nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1892.220947] env[68194]: DEBUG nova.network.neutron [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1892.520456] env[68194]: DEBUG nova.network.neutron [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.531320] env[68194]: INFO nova.compute.manager [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Took 0.31 seconds to deallocate network for instance. [ 1892.623476] env[68194]: INFO nova.scheduler.client.report [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Deleted allocations for instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 [ 1892.645901] env[68194]: DEBUG oslo_concurrency.lockutils [None req-56edcc4d-6348-4c07-9fe9-ef8dd3827c67 tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 661.334s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1892.647176] env[68194]: DEBUG oslo_concurrency.lockutils [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 464.984s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1892.647407] env[68194]: DEBUG oslo_concurrency.lockutils [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Acquiring lock "7b430b72-05fa-49a6-8bbb-7c083cb96457-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1892.647540] env[68194]: DEBUG oslo_concurrency.lockutils [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1892.647710] env[68194]: DEBUG oslo_concurrency.lockutils [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1892.649707] env[68194]: INFO nova.compute.manager [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Terminating instance [ 1892.651372] env[68194]: DEBUG nova.compute.manager [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1892.651563] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1892.652053] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-511312f5-6466-45a0-9168-4bc6c3384d4c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.661538] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12ff95a2-9691-4a91-abe6-8204dde60a5a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.672599] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1892.693708] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7b430b72-05fa-49a6-8bbb-7c083cb96457 could not be found. [ 1892.693919] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1892.694112] env[68194]: INFO nova.compute.manager [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 1892.694381] env[68194]: DEBUG oslo.service.loopingcall [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1892.694624] env[68194]: DEBUG nova.compute.manager [-] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1892.694721] env[68194]: DEBUG nova.network.neutron [-] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1892.717299] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1892.717536] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1892.719021] env[68194]: INFO nova.compute.claims [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1892.721527] env[68194]: DEBUG nova.network.neutron [-] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1892.729976] env[68194]: INFO nova.compute.manager [-] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] Took 0.03 seconds to deallocate network for instance. [ 1892.829454] env[68194]: DEBUG oslo_concurrency.lockutils [None req-949ec717-1dbf-41d5-8f35-1153e3d4371d tempest-ServerActionsTestJSON-1699304975 tempest-ServerActionsTestJSON-1699304975-project-member] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.182s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1892.830290] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 61.359s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1892.830478] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7b430b72-05fa-49a6-8bbb-7c083cb96457] During sync_power_state the instance has a pending task (deleting). 
Skip. [ 1892.830650] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "7b430b72-05fa-49a6-8bbb-7c083cb96457" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1892.900389] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-472170b1-9a41-40a0-821b-3bc6149e99ae {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.909199] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-21fc7488-e188-43d2-aa2f-9b00099882a1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.938856] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bde77ebb-5cd4-455e-8b2b-d0ca96ba5f4f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.946237] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-31dd0836-870e-4d7b-a07d-419fe6c0890a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1892.959026] env[68194]: DEBUG nova.compute.provider_tree [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1892.968324] env[68194]: DEBUG nova.scheduler.client.report [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1892.982393] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.265s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1892.982855] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Start building networks asynchronously for instance. 
{{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1893.014928] env[68194]: DEBUG nova.compute.utils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1893.016205] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1893.016385] env[68194]: DEBUG nova.network.neutron [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1893.024215] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1893.071616] env[68194]: DEBUG nova.policy [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '628bc4df46494159a5e5a4b71770f64f', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'c7fe2744a0c14564ae1dea9f2653bc4a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1893.087699] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1893.114384] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1893.114645] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1893.114814] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1893.115012] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1893.115174] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1893.115331] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1893.115545] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1893.115717] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1893.115887] 
env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1893.116099] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1893.116307] env[68194]: DEBUG nova.virt.hardware [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1893.117199] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1cbd6894-fce8-40af-bf9e-db16c9e22825 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.125421] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-effc2fd2-4d6c-436c-88ee-a1b0705f7ac0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1893.394046] env[68194]: DEBUG nova.network.neutron [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Successfully created port: 23c72376-c494-4b21-b373-10f6ece91501 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1894.096545] env[68194]: DEBUG nova.network.neutron [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Successfully updated port: 23c72376-c494-4b21-b373-10f6ece91501 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1894.109808] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "refresh_cache-8f84a8dc-6908-463c-85e3-f5189e8ca71d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1894.109808] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired lock "refresh_cache-8f84a8dc-6908-463c-85e3-f5189e8ca71d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1894.109808] env[68194]: DEBUG nova.network.neutron [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1894.150226] env[68194]: DEBUG 
nova.network.neutron [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1894.302145] env[68194]: DEBUG nova.network.neutron [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Updating instance_info_cache with network_info: [{"id": "23c72376-c494-4b21-b373-10f6ece91501", "address": "fa:16:3e:a8:b6:b1", "network": {"id": "0ba2ed75-b752-41fa-b27d-5a564f3d942e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2075431146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7fe2744a0c14564ae1dea9f2653bc4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23c72376-c4", "ovs_interfaceid": "23c72376-c494-4b21-b373-10f6ece91501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.315159] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Releasing lock "refresh_cache-8f84a8dc-6908-463c-85e3-f5189e8ca71d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1894.315438] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Instance network_info: |[{"id": "23c72376-c494-4b21-b373-10f6ece91501", "address": "fa:16:3e:a8:b6:b1", "network": {"id": "0ba2ed75-b752-41fa-b27d-5a564f3d942e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2075431146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7fe2744a0c14564ae1dea9f2653bc4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23c72376-c4", 
"ovs_interfaceid": "23c72376-c494-4b21-b373-10f6ece91501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1894.315818] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a8:b6:b1', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '47499d09-8010-4d02-ac96-4f057c104692', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '23c72376-c494-4b21-b373-10f6ece91501', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1894.323241] env[68194]: DEBUG oslo.service.loopingcall [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1894.323681] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1894.323950] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-89a024e7-dcc7-48ea-aa7f-f191130ca538 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.344167] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1894.344167] env[68194]: value = "task-3466941" [ 1894.344167] env[68194]: _type = "Task" [ 1894.344167] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.351931] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466941, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1894.559941] env[68194]: DEBUG nova.compute.manager [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Received event network-vif-plugged-23c72376-c494-4b21-b373-10f6ece91501 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1894.560303] env[68194]: DEBUG oslo_concurrency.lockutils [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] Acquiring lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1894.560648] env[68194]: DEBUG oslo_concurrency.lockutils [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] Lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1894.560938] env[68194]: DEBUG oslo_concurrency.lockutils [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] Lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1894.561245] env[68194]: DEBUG nova.compute.manager [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] No waiting events found dispatching network-vif-plugged-23c72376-c494-4b21-b373-10f6ece91501 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1894.561540] env[68194]: WARNING nova.compute.manager [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Received unexpected event network-vif-plugged-23c72376-c494-4b21-b373-10f6ece91501 for instance with vm_state building and task_state spawning. [ 1894.561822] env[68194]: DEBUG nova.compute.manager [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Received event network-changed-23c72376-c494-4b21-b373-10f6ece91501 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1894.562115] env[68194]: DEBUG nova.compute.manager [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Refreshing instance network info cache due to event network-changed-23c72376-c494-4b21-b373-10f6ece91501. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1894.562438] env[68194]: DEBUG oslo_concurrency.lockutils [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] Acquiring lock "refresh_cache-8f84a8dc-6908-463c-85e3-f5189e8ca71d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1894.562697] env[68194]: DEBUG oslo_concurrency.lockutils [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] Acquired lock "refresh_cache-8f84a8dc-6908-463c-85e3-f5189e8ca71d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1894.562972] env[68194]: DEBUG nova.network.neutron [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Refreshing network info cache for port 23c72376-c494-4b21-b373-10f6ece91501 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1894.822470] env[68194]: DEBUG nova.network.neutron [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Updated VIF entry in instance network info cache for port 23c72376-c494-4b21-b373-10f6ece91501. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1894.822895] env[68194]: DEBUG nova.network.neutron [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Updating instance_info_cache with network_info: [{"id": "23c72376-c494-4b21-b373-10f6ece91501", "address": "fa:16:3e:a8:b6:b1", "network": {"id": "0ba2ed75-b752-41fa-b27d-5a564f3d942e", "bridge": "br-int", "label": "tempest-ServerDiskConfigTestJSON-2075431146-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.14", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "c7fe2744a0c14564ae1dea9f2653bc4a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "47499d09-8010-4d02-ac96-4f057c104692", "external-id": "nsx-vlan-transportzone-14", "segmentation_id": 14, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap23c72376-c4", "ovs_interfaceid": "23c72376-c494-4b21-b373-10f6ece91501", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1894.832246] env[68194]: DEBUG oslo_concurrency.lockutils [req-c8600d5c-2d07-4aa4-ac62-e8fe7f4f148e req-f0b222ac-8f74-45b5-a5d2-18b55337d22a service nova] Releasing lock "refresh_cache-8f84a8dc-6908-463c-85e3-f5189e8ca71d" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1894.854570] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466941, 'name': CreateVM_Task, 'duration_secs': 0.287964} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1894.854671] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1894.861171] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1894.861346] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1894.861733] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1894.861987] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-375801e2-457a-442a-af4a-d855d0c1f1d2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1894.866791] env[68194]: DEBUG oslo_vmware.api [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 1894.866791] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52eda98e-4dbd-6434-6e64-b01e23e98ea9" [ 1894.866791] env[68194]: _type = "Task" [ 1894.866791] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1894.874570] env[68194]: DEBUG oslo_vmware.api [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52eda98e-4dbd-6434-6e64-b01e23e98ea9, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1895.377349] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1895.377754] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1895.377883] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1902.415567] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1902.415888] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1905.047669] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1905.416303] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1906.416836] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1906.417240] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1907.411816] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1907.433668] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1909.416580] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1909.416912] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1909.416912] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1909.437042] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.437042] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.437220] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.437220] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.437407] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.437547] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.437609] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.437698] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.437815] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.437934] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1909.438069] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1909.438609] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1909.438779] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1909.451137] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1909.451137] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1909.451137] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1909.451339] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1909.452377] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5de59ed2-698e-4c0f-a19e-a3ca7e11a511 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.460879] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-12bc1eff-0f36-42b5-9495-6dd8a3df1442 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.474614] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a5551d1b-e060-40e1-ba1b-29cda0afbba5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.480958] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f82559b5-5983-4ad8-ac99-3511fd8127c9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.510098] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180959MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1909.510256] env[68194]: DEBUG 
oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1909.510448] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1909.579033] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.579203] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.579333] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.579457] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.579608] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.579732] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4bcfda9d-e14b-441c-aebb-498dbc10513e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.579848] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.579963] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.580090] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.580204] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1909.590942] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 1909.591167] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1909.591312] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1909.712474] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8ace3981-c6c0-435b-ac9d-dccb30d5beb1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.720019] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-da307053-05dc-4c9c-98b5-6a284d3fbc81 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.750456] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af231447-0882-43da-b7e6-61e7e95fcc2f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.757278] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a7f7349-cfea-4cea-9190-e6a62cf15027 {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1909.769598] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1909.777857] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1909.790972] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1909.791169] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.281s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1910.785996] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1940.784051] env[68194]: WARNING oslo_vmware.rw_handles [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1940.784051] env[68194]: ERROR oslo_vmware.rw_handles [ 1940.787134] env[68194]: DEBUG 
nova.virt.vmwareapi.images [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/731c2132-8311-443a-bb63-9e1d9365eef2/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1940.788283] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1940.788707] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Copying Virtual Disk [datastore1] vmware_temp/731c2132-8311-443a-bb63-9e1d9365eef2/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/731c2132-8311-443a-bb63-9e1d9365eef2/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1940.791079] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-f7021a9b-bdb2-4838-94ee-251172db707d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1940.797791] env[68194]: DEBUG oslo_vmware.api [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Waiting for the task: (returnval){ [ 1940.797791] env[68194]: value = "task-3466942" [ 1940.797791] env[68194]: _type = "Task" [ 1940.797791] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1940.806140] env[68194]: DEBUG oslo_vmware.api [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Task: {'id': task-3466942, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.309033] env[68194]: DEBUG oslo_vmware.exceptions [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1941.309033] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1941.309735] env[68194]: ERROR nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1941.309735] env[68194]: Faults: ['InvalidArgument'] [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Traceback (most recent call last): [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] yield resources [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] self.driver.spawn(context, instance, image_meta, [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] self._fetch_image_if_missing(context, vi) [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] image_cache(vi, tmp_image_ds_loc) [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] vm_util.copy_virtual_disk( [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] session._wait_for_task(vmdk_copy_task) [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] return self.wait_for_task(task_ref) [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] return evt.wait() [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] result = hub.switch() [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] return self.greenlet.switch() [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] self.f(*self.args, **self.kw) [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] raise exceptions.translate_fault(task_info.error) [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Faults: ['InvalidArgument'] [ 1941.309735] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] [ 1941.310762] env[68194]: INFO nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Terminating instance [ 1941.312293] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1941.312552] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1941.312847] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-5bedba0a-b251-4c31-8fda-534880819d99 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.316708] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1941.316959] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1941.317741] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2d462be7-0e23-4bd4-a912-0e5469345fca {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.324506] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1941.324802] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-d48ecc1e-4297-48e7-bcca-d66d0e212b19 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.327081] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1941.327338] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1941.328449] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0ccb74bd-7d2d-45f6-bb11-2b26087760d7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.333352] env[68194]: DEBUG oslo_vmware.api [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for the task: (returnval){ [ 1941.333352] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5296e2fe-c4dd-42a8-668d-9397237c8194" [ 1941.333352] env[68194]: _type = "Task" [ 1941.333352] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.341347] env[68194]: DEBUG oslo_vmware.api [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5296e2fe-c4dd-42a8-668d-9397237c8194, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.387882] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1941.388199] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1941.388452] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Deleting the datastore file [datastore1] 3da3b410-889a-42c5-9603-f92f689ab5b5 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1941.388801] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-ccd53cfc-673f-4c38-9438-18f799fbcb18 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.395235] env[68194]: DEBUG oslo_vmware.api [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Waiting for the task: (returnval){ [ 1941.395235] env[68194]: value = "task-3466944" [ 1941.395235] env[68194]: _type = "Task" [ 1941.395235] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1941.402679] env[68194]: DEBUG oslo_vmware.api [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Task: {'id': task-3466944, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1941.844225] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1941.844525] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Creating directory with path [datastore1] vmware_temp/d3435d9e-05dd-47ff-9374-b11d94c40a48/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1941.844764] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-49e56ea8-c7ef-43f1-b24e-d434404fdb12 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.855664] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Created directory with path [datastore1] vmware_temp/d3435d9e-05dd-47ff-9374-b11d94c40a48/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1941.855854] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Fetch image to [datastore1] vmware_temp/d3435d9e-05dd-47ff-9374-b11d94c40a48/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1941.856038] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/d3435d9e-05dd-47ff-9374-b11d94c40a48/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1941.856745] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2a3ecf73-fc5e-4171-ba9c-4e5458015912 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.863288] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af5496ba-bc77-4e2d-8c00-22278e05f1ec {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.871942] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-50628dea-b70b-4e0e-9001-ec53ac43ac25 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.904174] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4e1e37e5-045d-4399-a6e9-1ab2b23c47b7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.912374] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-2d5a92e7-8f8d-4b86-9489-c5048efe7f28 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1941.913975] env[68194]: DEBUG oslo_vmware.api [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Task: {'id': task-3466944, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.079341} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1941.914229] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1941.914410] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1941.914602] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1941.914756] env[68194]: INFO nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1941.916850] env[68194]: DEBUG nova.compute.claims [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1941.917051] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1941.917278] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1941.933752] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1942.048940] env[68194]: DEBUG oslo_vmware.rw_handles [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3435d9e-05dd-47ff-9374-b11d94c40a48/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1942.108260] env[68194]: DEBUG oslo_vmware.rw_handles [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1942.108447] env[68194]: DEBUG oslo_vmware.rw_handles [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/d3435d9e-05dd-47ff-9374-b11d94c40a48/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1942.149626] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7da43dea-5d3b-4ee4-9fd9-664f917faec7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.157490] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-766741e2-c4f9-4df4-90cf-46c684bb74b0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.186303] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-77885a13-678d-4f8e-aa48-46f2790d91e4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.192892] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a2b68f0-6fd8-4842-ae0a-9508ecba4696 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.205269] env[68194]: DEBUG nova.compute.provider_tree [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1942.214753] env[68194]: DEBUG nova.scheduler.client.report [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1942.227407] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.310s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1942.227912] env[68194]: ERROR nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1942.227912] env[68194]: Faults: ['InvalidArgument'] [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Traceback (most recent call last): [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1942.227912] 
env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] self.driver.spawn(context, instance, image_meta, [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] self._fetch_image_if_missing(context, vi) [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] image_cache(vi, tmp_image_ds_loc) [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] vm_util.copy_virtual_disk( [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] session._wait_for_task(vmdk_copy_task) [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] return self.wait_for_task(task_ref) [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] return evt.wait() [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] result = hub.switch() [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] return self.greenlet.switch() [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] self.f(*self.args, **self.kw) [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] raise exceptions.translate_fault(task_info.error) [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Faults: ['InvalidArgument'] [ 1942.227912] env[68194]: ERROR nova.compute.manager [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] [ 1942.228829] env[68194]: DEBUG nova.compute.utils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1942.229948] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Build of instance 3da3b410-889a-42c5-9603-f92f689ab5b5 was re-scheduled: A specified parameter was not correct: fileType [ 1942.229948] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1942.230360] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1942.230538] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1942.230719] env[68194]: DEBUG nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1942.230896] env[68194]: DEBUG nova.network.neutron [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1942.541418] env[68194]: DEBUG nova.network.neutron [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.552712] env[68194]: INFO nova.compute.manager [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Took 0.32 seconds to deallocate network for instance. [ 1942.646893] env[68194]: INFO nova.scheduler.client.report [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Deleted allocations for instance 3da3b410-889a-42c5-9603-f92f689ab5b5 [ 1942.667779] env[68194]: DEBUG oslo_concurrency.lockutils [None req-5e4c704a-8904-4c9f-be3d-13337cdc7f34 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 674.216s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1942.668939] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 477.919s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1942.669206] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Acquiring lock "3da3b410-889a-42c5-9603-f92f689ab5b5-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1942.669547] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1942.669646] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1942.672260] env[68194]: INFO nova.compute.manager [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Terminating instance [ 1942.674201] env[68194]: DEBUG nova.compute.manager [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1942.674484] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1942.674791] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-c0a77d57-637f-403c-b645-e86ba71e4887 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.681449] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 1942.687730] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1d06ecf1-5c6b-4d19-b2da-e1366c653000 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.717142] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 3da3b410-889a-42c5-9603-f92f689ab5b5 could not be found. 
[ 1942.717336] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1942.717552] env[68194]: INFO nova.compute.manager [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1942.717797] env[68194]: DEBUG oslo.service.loopingcall [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1942.720078] env[68194]: DEBUG nova.compute.manager [-] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1942.720197] env[68194]: DEBUG nova.network.neutron [-] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1942.733668] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1942.733893] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1942.735326] env[68194]: INFO nova.compute.claims [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 1942.744781] env[68194]: DEBUG nova.network.neutron [-] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1942.755211] env[68194]: INFO nova.compute.manager [-] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] Took 0.03 seconds to deallocate network for instance. 
[ 1942.840879] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2880e7fc-fe43-45af-843b-37b9edf1da70 tempest-ServerActionsTestOtherB-1269883402 tempest-ServerActionsTestOtherB-1269883402-project-member] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.172s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1942.841637] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 111.370s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1942.841849] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 3da3b410-889a-42c5-9603-f92f689ab5b5] During sync_power_state the instance has a pending task (deleting). Skip. [ 1942.842107] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "3da3b410-889a-42c5-9603-f92f689ab5b5" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1942.909483] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-808fb13f-20bc-4ccd-87bd-db6b15bb06a3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.917160] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bec0652-7f8f-4cd7-9f94-fd6de70dabeb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.947626] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-43ab6ba5-3fbd-4d31-b5d7-22c6841673da {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.954309] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1bb879d6-a6da-464c-926c-0560ea7ebd1c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1942.967422] env[68194]: DEBUG nova.compute.provider_tree [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1942.976183] env[68194]: DEBUG nova.scheduler.client.report [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 
'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1942.990781] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.257s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1942.991260] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 1943.021075] env[68194]: DEBUG nova.compute.utils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 1943.022505] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 1943.022692] env[68194]: DEBUG nova.network.neutron [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 1943.031290] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 1943.086072] env[68194]: DEBUG nova.policy [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '905b97edce374ad5a240d61220f66f80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05181674899f44e7bb6d234643c3e6b6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 1943.094637] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 1943.121023] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 1943.121023] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 1943.121233] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 1943.121407] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 1943.121583] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 1943.121737] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 1943.121953] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 1943.122134] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 1943.122314] env[68194]: DEBUG nova.virt.hardware [None 
req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 1943.122512] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 1943.122696] env[68194]: DEBUG nova.virt.hardware [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 1943.123570] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-68cb5222-d28f-4b73-ba1f-a3a8b9bf8bee {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.132415] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-855e051f-5daa-4c86-9285-a63373ee22c0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1943.359126] env[68194]: DEBUG nova.network.neutron [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Successfully created port: 6b397a48-6e2b-4a18-894b-bcf4a35d1a67 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 1943.899694] env[68194]: DEBUG nova.network.neutron [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Successfully updated port: 6b397a48-6e2b-4a18-894b-bcf4a35d1a67 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 1943.914934] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "refresh_cache-5fb0537f-884d-421c-9f47-ec8fd7236e54" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1943.915354] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "refresh_cache-5fb0537f-884d-421c-9f47-ec8fd7236e54" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1943.915354] env[68194]: DEBUG nova.network.neutron [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1943.971316] env[68194]: DEBUG nova.network.neutron [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 
tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1944.119754] env[68194]: DEBUG nova.network.neutron [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Updating instance_info_cache with network_info: [{"id": "6b397a48-6e2b-4a18-894b-bcf4a35d1a67", "address": "fa:16:3e:ba:bb:bc", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b397a48-6e", "ovs_interfaceid": "6b397a48-6e2b-4a18-894b-bcf4a35d1a67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1944.133124] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "refresh_cache-5fb0537f-884d-421c-9f47-ec8fd7236e54" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1944.133433] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Instance network_info: |[{"id": "6b397a48-6e2b-4a18-894b-bcf4a35d1a67", "address": "fa:16:3e:ba:bb:bc", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b397a48-6e", "ovs_interfaceid": "6b397a48-6e2b-4a18-894b-bcf4a35d1a67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", 
"profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 1944.133827] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:ba:bb:bc', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '6b397a48-6e2b-4a18-894b-bcf4a35d1a67', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 1944.141476] env[68194]: DEBUG oslo.service.loopingcall [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1944.141937] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 1944.142182] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-490e63b8-0d3f-4cd0-ae02-3ae4b4bc89b3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.163623] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 1944.163623] env[68194]: value = "task-3466945" [ 1944.163623] env[68194]: _type = "Task" [ 1944.163623] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.171400] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466945, 'name': CreateVM_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.606145] env[68194]: DEBUG nova.compute.manager [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Received event network-vif-plugged-6b397a48-6e2b-4a18-894b-bcf4a35d1a67 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1944.606337] env[68194]: DEBUG oslo_concurrency.lockutils [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] Acquiring lock "5fb0537f-884d-421c-9f47-ec8fd7236e54-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1944.606554] env[68194]: DEBUG oslo_concurrency.lockutils [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] Lock "5fb0537f-884d-421c-9f47-ec8fd7236e54-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1944.606758] env[68194]: DEBUG oslo_concurrency.lockutils [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] Lock "5fb0537f-884d-421c-9f47-ec8fd7236e54-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1944.606881] env[68194]: DEBUG nova.compute.manager [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] No waiting events found dispatching network-vif-plugged-6b397a48-6e2b-4a18-894b-bcf4a35d1a67 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 1944.607062] env[68194]: WARNING nova.compute.manager [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Received unexpected event network-vif-plugged-6b397a48-6e2b-4a18-894b-bcf4a35d1a67 for instance with vm_state building and task_state spawning. [ 1944.607226] env[68194]: DEBUG nova.compute.manager [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Received event network-changed-6b397a48-6e2b-4a18-894b-bcf4a35d1a67 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 1944.607382] env[68194]: DEBUG nova.compute.manager [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Refreshing instance network info cache due to event network-changed-6b397a48-6e2b-4a18-894b-bcf4a35d1a67. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 1944.607567] env[68194]: DEBUG oslo_concurrency.lockutils [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] Acquiring lock "refresh_cache-5fb0537f-884d-421c-9f47-ec8fd7236e54" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1944.607702] env[68194]: DEBUG oslo_concurrency.lockutils [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] Acquired lock "refresh_cache-5fb0537f-884d-421c-9f47-ec8fd7236e54" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1944.607858] env[68194]: DEBUG nova.network.neutron [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Refreshing network info cache for port 6b397a48-6e2b-4a18-894b-bcf4a35d1a67 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 1944.673601] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466945, 'name': CreateVM_Task, 'duration_secs': 0.283932} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1944.673766] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 1944.674443] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1944.674613] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1944.674924] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 1944.675186] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0d5fbb50-7352-4f50-beda-315a9fbbc51c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1944.680037] env[68194]: DEBUG oslo_vmware.api [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 1944.680037] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52fcc4c8-b1d4-7d8f-f5a6-e770adf85722" [ 1944.680037] env[68194]: _type = "Task" [ 1944.680037] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1944.688018] env[68194]: DEBUG oslo_vmware.api [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52fcc4c8-b1d4-7d8f-f5a6-e770adf85722, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1944.843855] env[68194]: DEBUG nova.network.neutron [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Updated VIF entry in instance network info cache for port 6b397a48-6e2b-4a18-894b-bcf4a35d1a67. {{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 1944.844224] env[68194]: DEBUG nova.network.neutron [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Updating instance_info_cache with network_info: [{"id": "6b397a48-6e2b-4a18-894b-bcf4a35d1a67", "address": "fa:16:3e:ba:bb:bc", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap6b397a48-6e", "ovs_interfaceid": "6b397a48-6e2b-4a18-894b-bcf4a35d1a67", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1944.853778] env[68194]: DEBUG oslo_concurrency.lockutils [req-e5e76815-87ef-49ff-93ba-71f6ac4e0cba req-39baed17-40aa-4815-ae5b-e9572dd8e239 service nova] Releasing lock "refresh_cache-5fb0537f-884d-421c-9f47-ec8fd7236e54" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1945.190116] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1945.190513] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing 
/opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 1945.190618] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1964.417465] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1964.417790] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 1966.418071] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1966.418382] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1968.416576] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.418139] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1969.418139] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 1969.418139] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 1969.440323] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.440567] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.440732] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.440863] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.440989] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.441124] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.441246] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.441364] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.441480] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.441595] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 1969.441728] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 1969.442237] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1971.416465] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1971.416850] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1971.416906] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 1971.427994] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1971.428219] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1971.428384] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1971.428540] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 1971.430046] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66398b98-f75a-44cd-8980-8af3d5ff971c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.438574] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f965f24f-9824-498c-a281-502c1bcc46c7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.452481] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d8c7574-7894-412b-baa3-07c2a1047129 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.458555] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0a61492a-1de0-4d5a-9d22-be92ab3ca06e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.488330] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180950MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 1971.488470] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1971.488655] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1971.580191] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.580356] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 73abf0ba-016c-4536-afd3-f6c6960045fc actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.580487] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.580612] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.580765] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4bcfda9d-e14b-441c-aebb-498dbc10513e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.580898] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.581027] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.581150] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.581271] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.581384] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 1971.581570] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 10 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 1971.581707] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1792MB phys_disk=200GB used_disk=10GB total_vcpus=48 used_vcpus=10 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 1971.696353] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7240614d-50f8-4dd2-9a34-2b3a6fe046cc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.704141] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d56518c4-0e1b-492a-8224-9f265f4e1722 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.733510] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7acc4569-43ca-44eb-b1bb-552d5ea9657a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.740452] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-102861f6-5b8c-433c-ad49-292d64127d62 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1971.753187] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1971.761388] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1971.774594] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 1971.774799] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.286s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1990.804063] env[68194]: WARNING oslo_vmware.rw_handles [None 
req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 1990.804063] env[68194]: ERROR oslo_vmware.rw_handles [ 1990.804699] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/d3435d9e-05dd-47ff-9374-b11d94c40a48/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 1990.807210] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 1990.807457] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Copying Virtual Disk [datastore1] vmware_temp/d3435d9e-05dd-47ff-9374-b11d94c40a48/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/d3435d9e-05dd-47ff-9374-b11d94c40a48/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 1990.807773] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-a345d26c-e487-4d65-af30-75c507ff19e4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1990.815062] env[68194]: DEBUG oslo_vmware.api [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for the task: (returnval){ [ 1990.815062] env[68194]: value = "task-3466946" [ 1990.815062] env[68194]: _type = "Task" [ 1990.815062] 
env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1990.823389] env[68194]: DEBUG oslo_vmware.api [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Task: {'id': task-3466946, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.326105] env[68194]: DEBUG oslo_vmware.exceptions [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 1991.326105] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1991.326337] env[68194]: ERROR nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1991.326337] env[68194]: Faults: ['InvalidArgument'] [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Traceback (most recent call last): [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] yield resources [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] self.driver.spawn(context, instance, image_meta, [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] self._fetch_image_if_missing(context, vi) [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] image_cache(vi, tmp_image_ds_loc) [ 1991.326337] env[68194]: ERROR 
nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] vm_util.copy_virtual_disk( [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] session._wait_for_task(vmdk_copy_task) [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] return self.wait_for_task(task_ref) [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] return evt.wait() [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] result = hub.switch() [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] return self.greenlet.switch() [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] self.f(*self.args, **self.kw) [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] raise exceptions.translate_fault(task_info.error) [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Faults: ['InvalidArgument'] [ 1991.326337] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] [ 1991.327524] env[68194]: INFO nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Terminating instance [ 1991.328174] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquired lock 
"[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1991.328410] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1991.329028] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-8191fa02-8101-4f84-8852-ce6cd510eb07 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.330871] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1991.331083] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1991.331818] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3372fb1-2f9c-4bb4-864e-b15b1a3a58d5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.338257] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1991.338462] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-5e3a80ed-ca4c-40f4-ba08-37ce67e70b20 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.340573] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1991.340748] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1991.341677] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-872c4192-a085-485d-93e3-774c2461d7b2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.346849] env[68194]: DEBUG oslo_vmware.api [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Waiting for the task: (returnval){ [ 1991.346849] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5214b22c-4ce3-f675-fe39-21542830746d" [ 1991.346849] env[68194]: _type = "Task" [ 1991.346849] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.354350] env[68194]: DEBUG oslo_vmware.api [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5214b22c-4ce3-f675-fe39-21542830746d, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.412679] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1991.412904] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1991.413040] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Deleting the datastore file [datastore1] 95be4f59-e835-4389-93ae-9814e97f8ef4 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1991.413298] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-808b6b6f-ae36-4095-a9fe-79daf739aa49 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.419923] env[68194]: DEBUG oslo_vmware.api [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for the task: (returnval){ [ 1991.419923] env[68194]: value = "task-3466948" [ 1991.419923] env[68194]: _type = "Task" [ 1991.419923] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1991.427324] env[68194]: DEBUG oslo_vmware.api [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Task: {'id': task-3466948, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1991.857642] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1991.857994] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Creating directory with path [datastore1] vmware_temp/a2e40192-c7dc-41b6-ae3e-90d00c8186cc/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1991.858252] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-755434f7-38bd-4139-9d16-817615f5b7cd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.869688] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Created directory with path [datastore1] vmware_temp/a2e40192-c7dc-41b6-ae3e-90d00c8186cc/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1991.869923] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Fetch image to [datastore1] vmware_temp/a2e40192-c7dc-41b6-ae3e-90d00c8186cc/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1991.870138] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/a2e40192-c7dc-41b6-ae3e-90d00c8186cc/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1991.870860] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a564c829-6127-4842-b863-c56878264494 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.879083] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b9e0f784-047b-45db-8ebb-d1a68d118542 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.889494] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-eed5ed0e-4f7c-48fa-a341-3fa3b5434067 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.920144] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e3f3a21e-71bb-4e91-82c2-6912c53f05de {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.931208] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-ae9a15c8-8b3a-49d0-8621-0c6eb3fd8a31 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1991.932842] env[68194]: DEBUG oslo_vmware.api [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Task: {'id': task-3466948, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.070679} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1991.933100] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1991.933285] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1991.933455] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1991.933711] env[68194]: INFO nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 1991.935649] env[68194]: DEBUG nova.compute.claims [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1991.935838] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1991.936239] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1991.961576] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1992.106554] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1992.107329] env[68194]: ERROR nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Instance failed to spawn: nova.exception.ImageNotAuthorized: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. 
[ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Traceback (most recent call last): [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] result = getattr(controller, method)(*args, **kwargs) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self._get(image_id) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] resp, body = self.http_client.get(url, headers=header) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self.request(url, 'GET', **kwargs) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self._handle_response(resp) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] raise exc.from_response(resp, resp.content) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. 
[ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] During handling of the above exception, another exception occurred: [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Traceback (most recent call last): [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] yield resources [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self.driver.spawn(context, instance, image_meta, [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self._fetch_image_if_missing(context, vi) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] image_fetch(context, vi, tmp_image_ds_loc) [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] images.fetch_image( [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1992.107329] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] metadata = IMAGE_API.get(context, image_ref) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return session.show(context, image_id, [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] _reraise_translated_image_exception(image_id) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File 
"/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] raise new_exc.with_traceback(exc_trace) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] result = getattr(controller, method)(*args, **kwargs) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self._get(image_id) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] resp, body = self.http_client.get(url, headers=header) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self.request(url, 'GET', **kwargs) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self._handle_response(resp) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] raise exc.from_response(resp, resp.content) [ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] nova.exception.ImageNotAuthorized: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. 
[ 1992.109090] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] [ 1992.109090] env[68194]: INFO nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Terminating instance [ 1992.109933] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1992.109933] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1992.110058] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "refresh_cache-73abf0ba-016c-4536-afd3-f6c6960045fc" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1992.110213] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquired lock "refresh_cache-73abf0ba-016c-4536-afd3-f6c6960045fc" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1992.110381] env[68194]: DEBUG nova.network.neutron [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1992.111282] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-5dc12b83-b867-40d2-bc92-155b6b134ac5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.119295] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1992.119477] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 1992.121381] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e29b8ba6-0c73-4802-b03a-49203912baf0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.125900] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-984a1823-d327-4d29-aaa4-91ed5aa46c41 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.131569] env[68194]: DEBUG oslo_vmware.api [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 1992.131569] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52fd84d4-6873-1c02-da85-f69e58b5ddff" [ 1992.131569] env[68194]: _type = "Task" [ 1992.131569] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.137019] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f78f188-80bd-4d04-a937-3ac40857ad77 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.167958] env[68194]: DEBUG nova.network.neutron [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1992.169758] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 1992.169997] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating directory with path [datastore1] vmware_temp/05bf8059-20be-497d-84b0-270799d76e01/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 1992.170664] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-ebd31d32-45be-44c4-8763-b80ffc23e77b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.172785] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6ad8ee0d-3569-4bb1-b803-4d61db79ce6d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.179876] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-88f5a903-cc35-49b1-826e-3dd516b287be {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.194826] env[68194]: DEBUG nova.compute.provider_tree [None req-989184ca-9828-4d61-bb71-b45e34cc04bc 
tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1992.198523] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Created directory with path [datastore1] vmware_temp/05bf8059-20be-497d-84b0-270799d76e01/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 1992.198704] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Fetch image to [datastore1] vmware_temp/05bf8059-20be-497d-84b0-270799d76e01/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 1992.198879] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/05bf8059-20be-497d-84b0-270799d76e01/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 1992.199787] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8c57c63c-d17b-456d-bb4c-2e0300a98655 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.203246] env[68194]: DEBUG nova.scheduler.client.report [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1992.210042] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7b830779-1b91-4755-a838-428be19419e8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.219628] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d66cdb7b-51f6-47ec-a52f-dcf1ab1c014a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.223586] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "compute_resources" "released" by 
"nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.287s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1992.224098] env[68194]: ERROR nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1992.224098] env[68194]: Faults: ['InvalidArgument'] [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Traceback (most recent call last): [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] self.driver.spawn(context, instance, image_meta, [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] self._fetch_image_if_missing(context, vi) [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] image_cache(vi, tmp_image_ds_loc) [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] vm_util.copy_virtual_disk( [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] session._wait_for_task(vmdk_copy_task) [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] return self.wait_for_task(task_ref) [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] return evt.wait() [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] result = hub.switch() [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] return self.greenlet.switch() [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] self.f(*self.args, **self.kw) [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] raise exceptions.translate_fault(task_info.error) [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Faults: ['InvalidArgument'] [ 1992.224098] env[68194]: ERROR nova.compute.manager [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] [ 1992.224900] env[68194]: DEBUG nova.compute.utils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1992.226448] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Build of instance 95be4f59-e835-4389-93ae-9814e97f8ef4 was re-scheduled: A specified parameter was not correct: fileType [ 1992.226448] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1992.226823] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1992.226998] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1992.227196] env[68194]: DEBUG nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1992.227378] env[68194]: DEBUG nova.network.neutron [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1992.254114] env[68194]: DEBUG nova.network.neutron [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.255696] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2773b5e4-f4dd-423d-8141-fb5634f22d05 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.261912] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a7d47e0c-5e1a-4666-85b2-95649d4eb235 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.264243] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Releasing lock "refresh_cache-73abf0ba-016c-4536-afd3-f6c6960045fc" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1992.264607] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1992.264794] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1992.266246] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8bc27022-d0b6-4e8d-be20-1c9b12c0bf7b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.272273] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 1992.272504] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-72de4597-666b-4db1-833d-3b736204eb4b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.283341] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 1992.301439] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 1992.301640] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 1992.301839] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Deleting the datastore file [datastore1] 73abf0ba-016c-4536-afd3-f6c6960045fc {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 1992.303893] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-903387f0-004a-4adf-a611-2f540b032974 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.309743] env[68194]: DEBUG oslo_vmware.api [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Waiting for the task: (returnval){ [ 1992.309743] env[68194]: value = "task-3466950" [ 1992.309743] env[68194]: _type = "Task" [ 1992.309743] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 1992.321337] env[68194]: DEBUG oslo_vmware.api [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Task: {'id': task-3466950, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 1992.338031] env[68194]: DEBUG oslo_vmware.rw_handles [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05bf8059-20be-497d-84b0-270799d76e01/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 1992.396677] env[68194]: DEBUG oslo_vmware.rw_handles [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 1992.396855] env[68194]: DEBUG oslo_vmware.rw_handles [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/05bf8059-20be-497d-84b0-270799d76e01/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 1992.548950] env[68194]: DEBUG nova.network.neutron [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.559536] env[68194]: INFO nova.compute.manager [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Took 0.33 seconds to deallocate network for instance. 
[ 1992.646685] env[68194]: INFO nova.scheduler.client.report [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Deleted allocations for instance 95be4f59-e835-4389-93ae-9814e97f8ef4 [ 1992.669480] env[68194]: DEBUG oslo_concurrency.lockutils [None req-989184ca-9828-4d61-bb71-b45e34cc04bc tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 683.839s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1992.669990] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 488.011s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1992.669990] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Acquiring lock "95be4f59-e835-4389-93ae-9814e97f8ef4-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1992.670227] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1992.670421] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1992.672443] env[68194]: INFO nova.compute.manager [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Terminating instance [ 1992.674147] env[68194]: DEBUG nova.compute.manager [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1992.674348] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1992.674814] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7c2023ba-e5e1-4d22-aded-40829afbe97d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.683860] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc85243f-2713-4ae9-8118-844aa7382d65 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.711192] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 95be4f59-e835-4389-93ae-9814e97f8ef4 could not be found. [ 1992.711423] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1992.711604] env[68194]: INFO nova.compute.manager [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1992.711851] env[68194]: DEBUG oslo.service.loopingcall [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1992.712092] env[68194]: DEBUG nova.compute.manager [-] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1992.712191] env[68194]: DEBUG nova.network.neutron [-] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1992.734644] env[68194]: DEBUG nova.network.neutron [-] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1992.742039] env[68194]: INFO nova.compute.manager [-] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] Took 0.03 seconds to deallocate network for instance. 
[ 1992.825303] env[68194]: DEBUG oslo_vmware.api [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Task: {'id': task-3466950, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.031254} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 1992.825756] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ead21e00-bd44-4f3c-b079-c12bd8909d3d tempest-VolumesAdminNegativeTest-1550969379 tempest-VolumesAdminNegativeTest-1550969379-project-member] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.156s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1992.826576] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 1992.826766] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 1992.826940] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1992.827123] env[68194]: INFO nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Took 0.56 seconds to destroy the instance on the hypervisor. [ 1992.827394] env[68194]: DEBUG oslo.service.loopingcall [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1992.827558] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 161.356s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1992.827722] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 95be4f59-e835-4389-93ae-9814e97f8ef4] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 1992.827894] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "95be4f59-e835-4389-93ae-9814e97f8ef4" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1992.828466] env[68194]: DEBUG nova.compute.manager [-] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Skipping network deallocation for instance since networking was not requested. {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1992.830597] env[68194]: DEBUG nova.compute.claims [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 1992.830758] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1992.830966] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1992.977579] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d5faea92-2a06-437f-983b-cf7741a10c75 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1992.984798] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2999e499-6f0b-4e30-a6e2-70f18da9a399 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.016507] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de2163f3-077b-4c0a-b8b3-3ff764224a16 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.023529] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-093f8a20-6615-4689-a226-da0e313947d3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.036996] env[68194]: DEBUG nova.compute.provider_tree [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 1993.045264] env[68194]: DEBUG nova.scheduler.client.report [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Inventory has not changed for provider 
717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 1993.059096] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.228s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1993.059809] env[68194]: ERROR nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Failed to build and run instance: nova.exception.ImageNotAuthorized: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Traceback (most recent call last): [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] result = getattr(controller, method)(*args, **kwargs) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self._get(image_id) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] resp, body = self.http_client.get(url, headers=header) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self.request(url, 'GET', **kwargs) [ 1993.059809] env[68194]: ERROR 
nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self._handle_response(resp) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] raise exc.from_response(resp, resp.content) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] glanceclient.exc.HTTPUnauthorized: HTTP 401 Unauthorized: This server could not verify that you are authorized to access the document you requested. Either you supplied the wrong credentials (e.g., bad password), or your browser does not understand how to supply the credentials required. [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] During handling of the above exception, another exception occurred: [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Traceback (most recent call last): [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self.driver.spawn(context, instance, image_meta, [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self._vmops.spawn(context, instance, image_meta, injected_files, [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self._fetch_image_if_missing(context, vi) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 637, in _fetch_image_if_missing [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] image_fetch(context, vi, tmp_image_ds_loc) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 420, in _fetch_image_as_file [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] images.fetch_image( [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/virt/vmwareapi/images.py", line 251, in fetch_image [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] metadata = 
IMAGE_API.get(context, image_ref) [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 1205, in get [ 1993.059809] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return session.show(context, image_id, [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 287, in show [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] _reraise_translated_image_exception(image_id) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 1031, in _reraise_translated_image_exception [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] raise new_exc.with_traceback(exc_trace) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 285, in show [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] image = self._client.call(context, 2, 'get', args=(image_id,)) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/image/glance.py", line 191, in call [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] result = getattr(controller, method)(*args, **kwargs) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 197, in get [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self._get(image_id) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/utils.py", line 649, in inner [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return RequestIdProxy(wrapped(*args, **kwargs)) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/v2/images.py", line 190, in _get [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] resp, body = self.http_client.get(url, headers=header) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/keystoneauth1/adapter.py", line 395, in get [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self.request(url, 'GET', **kwargs) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 380, in request [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self._handle_response(resp) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/glanceclient/common/http.py", line 120, in _handle_response [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] raise exc.from_response(resp, resp.content) [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] nova.exception.ImageNotAuthorized: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. [ 1993.060941] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] [ 1993.060941] env[68194]: DEBUG nova.compute.utils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 1993.062305] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Build of instance 73abf0ba-016c-4536-afd3-f6c6960045fc was re-scheduled: Not authorized for image 1feed0b9-f929-4ce4-9c61-ef25290c6d99. {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 1993.062759] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 1993.062981] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "refresh_cache-73abf0ba-016c-4536-afd3-f6c6960045fc" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1993.063145] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquired lock "refresh_cache-73abf0ba-016c-4536-afd3-f6c6960045fc" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1993.063310] env[68194]: DEBUG nova.network.neutron [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1993.087862] env[68194]: DEBUG nova.network.neutron [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1993.155669] env[68194]: DEBUG nova.network.neutron [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.166254] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Releasing lock "refresh_cache-73abf0ba-016c-4536-afd3-f6c6960045fc" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1993.166514] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 1993.166720] env[68194]: DEBUG nova.compute.manager [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Skipping network deallocation for instance since networking was not requested. {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2259}} [ 1993.261534] env[68194]: INFO nova.scheduler.client.report [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Deleted allocations for instance 73abf0ba-016c-4536-afd3-f6c6960045fc [ 1993.280943] env[68194]: DEBUG oslo_concurrency.lockutils [None req-f8f313f5-c21f-419f-9499-85a3187e5ba3 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "73abf0ba-016c-4536-afd3-f6c6960045fc" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 640.375s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1993.281261] env[68194]: DEBUG oslo_concurrency.lockutils [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "73abf0ba-016c-4536-afd3-f6c6960045fc" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 444.488s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1993.281486] env[68194]: DEBUG oslo_concurrency.lockutils [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "73abf0ba-016c-4536-afd3-f6c6960045fc-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 1993.281689] env[68194]: DEBUG oslo_concurrency.lockutils [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "73abf0ba-016c-4536-afd3-f6c6960045fc-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1993.281857] env[68194]: DEBUG oslo_concurrency.lockutils [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "73abf0ba-016c-4536-afd3-f6c6960045fc-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1993.283698] env[68194]: INFO nova.compute.manager [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Terminating instance [ 1993.285241] env[68194]: DEBUG oslo_concurrency.lockutils [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquiring lock "refresh_cache-73abf0ba-016c-4536-afd3-f6c6960045fc" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 1993.285405] env[68194]: DEBUG oslo_concurrency.lockutils [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Acquired lock "refresh_cache-73abf0ba-016c-4536-afd3-f6c6960045fc" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 1993.285572] env[68194]: DEBUG nova.network.neutron [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 1993.318167] env[68194]: DEBUG nova.network.neutron [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 1993.386726] env[68194]: DEBUG nova.network.neutron [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 1993.395259] env[68194]: DEBUG oslo_concurrency.lockutils [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Releasing lock "refresh_cache-73abf0ba-016c-4536-afd3-f6c6960045fc" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 1993.395672] env[68194]: DEBUG nova.compute.manager [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 1993.395866] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 1993.396394] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-60e8ee97-e713-459b-9d31-11208e191a17 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.405380] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-081d764a-c6cb-4f7f-96c1-bd61bb733b85 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 1993.432751] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 73abf0ba-016c-4536-afd3-f6c6960045fc could not be found. [ 1993.432875] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 1993.433062] env[68194]: INFO nova.compute.manager [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Took 0.04 seconds to destroy the instance on the hypervisor. [ 1993.433353] env[68194]: DEBUG oslo.service.loopingcall [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 1993.433586] env[68194]: DEBUG nova.compute.manager [-] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 1993.433682] env[68194]: DEBUG nova.network.neutron [-] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 1993.531686] env[68194]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 1993.532016] env[68194]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-d2a2eb23-d606-4f58-8617-6d4444ef9ce5'] [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall self._deallocate_network( [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1993.532633] env[68194]: ERROR oslo.service.loopingcall [ 1993.533948] env[68194]: ERROR nova.compute.manager [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1993.560594] env[68194]: ERROR nova.compute.manager [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Traceback (most recent call last): [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] ret = obj(*args, **kwargs) [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] exception_handler_v20(status_code, error_body) [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] raise client_exc(message=error_message, [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Neutron server returns request_ids: ['req-d2a2eb23-d606-4f58-8617-6d4444ef9ce5'] [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] During handling of the above exception, another exception occurred: [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Traceback (most recent call last): [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File 
"/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self._delete_instance(context, instance, bdms) [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self._shutdown_instance(context, instance, bdms) [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self._try_deallocate_network(context, instance, requested_networks) [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] with excutils.save_and_reraise_exception(): [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self.force_reraise() [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] raise self.value [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] _deallocate_network_with_retries() [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return evt.wait() [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] result = hub.switch() [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self.greenlet.switch() [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] result = 
func(*self.args, **self.kw) [ 1993.560594] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] result = f(*args, **kwargs) [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self._deallocate_network( [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self.network_api.deallocate_for_instance( [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] data = neutron.list_ports(**search_opts) [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] ret = obj(*args, **kwargs) [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self.list('ports', self.ports_path, retrieve_all, [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] ret = obj(*args, **kwargs) [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] for r in self._pagination(collection, path, **params): [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] res = self.get(path, params=params) [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] ret = obj(*args, **kwargs) [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self.retry_request("GET", action, body=body, [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] ret = obj(*args, **kwargs) [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] return self.do_request(method, action, body=body, [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] ret = obj(*args, **kwargs) [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] self._handle_fault_response(status_code, replybody, resp) [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1993.561894] env[68194]: ERROR nova.compute.manager [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] [ 1993.587359] env[68194]: DEBUG oslo_concurrency.lockutils [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Lock "73abf0ba-016c-4536-afd3-f6c6960045fc" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.306s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1993.588381] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "73abf0ba-016c-4536-afd3-f6c6960045fc" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 162.116s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 1993.588613] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] During sync_power_state the instance has a pending task (deleting). Skip. 
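Both failures in this run have the same shape: the service client receives an HTTP 401 (Glance's HTTPUnauthorized, Neutron's Unauthorized), and Nova's wrapper translates it into a service-specific exception (ImageNotAuthorized, NeutronAdminCredentialConfigurationInvalid) before re-raising, which is what ultimately drives the instance to ERROR. A rough sketch of that translate-and-reraise pattern; the names below are hypothetical stand-ins, not the real wrapper in nova/network/neutron.py that the traceback points at.

import functools

class Unauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for the Nova exception raised in the traceback above."""

def translate_neutron_errors(func):
    """Turn a client-level 401 into a Nova configuration error."""
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except Unauthorized:
            # Admin credentials in nova.conf could not obtain a valid token.
            raise NeutronAdminCredentialConfigurationInvalid()
    return wrapper

@translate_neutron_errors
def list_ports(client, **search_opts):
    # May raise Unauthorized, which the decorator re-raises as the
    # configuration-invalid error seen in the log.
    return client.list_ports(**search_opts)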
[ 1993.588829] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "73abf0ba-016c-4536-afd3-f6c6960045fc" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 1993.627709] env[68194]: INFO nova.compute.manager [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] [instance: 73abf0ba-016c-4536-afd3-f6c6960045fc] Successfully reverted task state from None on failure for instance. [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server [None req-36ce57d6-3a07-4ebb-bf24-7c12f765ce36 tempest-ServerShowV257Test-985444009 tempest-ServerShowV257Test-985444009-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-d2a2eb23-d606-4f58-8617-6d4444ef9ce5'] [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 1993.630833] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server return evt.wait() [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 1993.632549] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 1993.634230] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 1993.634230] 
env[68194]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 1993.634230] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 1993.634230] env[68194]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 1993.634230] env[68194]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 1993.634230] env[68194]: ERROR oslo_messaging.rpc.server [ 2009.625719] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "073be13d-9a6d-4cfc-997b-f6b61710790a" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2009.626160] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Lock "073be13d-9a6d-4cfc-997b-f6b61710790a" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2009.637993] env[68194]: DEBUG nova.compute.manager [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2009.688587] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2009.688840] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2009.690310] env[68194]: INFO nova.compute.claims [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2009.850024] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f598d7cf-079b-42e0-ab30-baa3d11e0f9b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.857611] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa45ea4a-a620-4c24-bfd1-71d20bf60fb9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.889404] env[68194]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dbdcfc96-ca76-4c99-aae4-072ec41be6a6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.897549] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8800825-45f7-4b06-8af2-a4a763ecf08a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2009.910656] env[68194]: DEBUG nova.compute.provider_tree [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2009.920161] env[68194]: DEBUG nova.scheduler.client.report [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2009.937606] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.249s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2009.938135] env[68194]: DEBUG nova.compute.manager [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2009.973278] env[68194]: DEBUG nova.compute.utils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2009.975387] env[68194]: DEBUG nova.compute.manager [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Allocating IP information in the background. 
{{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2009.975584] env[68194]: DEBUG nova.network.neutron [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2009.987238] env[68194]: DEBUG nova.compute.manager [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2010.045020] env[68194]: DEBUG nova.policy [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eaa3e83d9acf4b68ab12b2439ad7b513', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b53572aed8d9403a8c3c5abf3f070588', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 2010.064156] env[68194]: DEBUG nova.compute.manager [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2010.096784] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2010.099421] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2010.100233] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2010.100594] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2010.100829] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2010.101152] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2010.101443] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2010.101862] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2010.102132] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 
tempest-ServersTestJSON-1869132353-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2010.102390] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2010.102616] env[68194]: DEBUG nova.virt.hardware [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2010.104011] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3100a892-c2aa-4538-ac56-ed8745a441c3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.114043] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8ad4a33-36b8-4ab5-92db-8d8c8af25b64 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2010.390898] env[68194]: DEBUG nova.network.neutron [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Successfully created port: 7ef885d3-f2f5-48d9-813b-dd09f72a5071 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2011.037859] env[68194]: DEBUG nova.compute.manager [req-cdd118b0-5783-42b1-b15b-4ad32e4b34b0 req-f8c59c40-feb4-4386-95b0-f8164bd3bf76 service nova] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Received event network-vif-plugged-7ef885d3-f2f5-48d9-813b-dd09f72a5071 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2011.038113] env[68194]: DEBUG oslo_concurrency.lockutils [req-cdd118b0-5783-42b1-b15b-4ad32e4b34b0 req-f8c59c40-feb4-4386-95b0-f8164bd3bf76 service nova] Acquiring lock "073be13d-9a6d-4cfc-997b-f6b61710790a-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2011.038307] env[68194]: DEBUG oslo_concurrency.lockutils [req-cdd118b0-5783-42b1-b15b-4ad32e4b34b0 req-f8c59c40-feb4-4386-95b0-f8164bd3bf76 service nova] Lock "073be13d-9a6d-4cfc-997b-f6b61710790a-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2011.038475] env[68194]: DEBUG oslo_concurrency.lockutils [req-cdd118b0-5783-42b1-b15b-4ad32e4b34b0 req-f8c59c40-feb4-4386-95b0-f8164bd3bf76 service nova] Lock "073be13d-9a6d-4cfc-997b-f6b61710790a-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2011.038691] env[68194]: DEBUG nova.compute.manager [req-cdd118b0-5783-42b1-b15b-4ad32e4b34b0 req-f8c59c40-feb4-4386-95b0-f8164bd3bf76 service nova] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] 
No waiting events found dispatching network-vif-plugged-7ef885d3-f2f5-48d9-813b-dd09f72a5071 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2011.038870] env[68194]: WARNING nova.compute.manager [req-cdd118b0-5783-42b1-b15b-4ad32e4b34b0 req-f8c59c40-feb4-4386-95b0-f8164bd3bf76 service nova] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Received unexpected event network-vif-plugged-7ef885d3-f2f5-48d9-813b-dd09f72a5071 for instance with vm_state building and task_state spawning. [ 2011.113912] env[68194]: DEBUG nova.network.neutron [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Successfully updated port: 7ef885d3-f2f5-48d9-813b-dd09f72a5071 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2011.124980] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "refresh_cache-073be13d-9a6d-4cfc-997b-f6b61710790a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2011.125135] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquired lock "refresh_cache-073be13d-9a6d-4cfc-997b-f6b61710790a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2011.125269] env[68194]: DEBUG nova.network.neutron [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2011.173406] env[68194]: DEBUG nova.network.neutron [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2011.330199] env[68194]: DEBUG nova.network.neutron [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Updating instance_info_cache with network_info: [{"id": "7ef885d3-f2f5-48d9-813b-dd09f72a5071", "address": "fa:16:3e:a5:9d:db", "network": {"id": "e79e376d-55f2-4078-b6ff-dfe489f79178", "bridge": "br-int", "label": "tempest-ServersTestJSON-366329787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b53572aed8d9403a8c3c5abf3f070588", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ef885d3-f2", "ovs_interfaceid": "7ef885d3-f2f5-48d9-813b-dd09f72a5071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2011.342139] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Releasing lock "refresh_cache-073be13d-9a6d-4cfc-997b-f6b61710790a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2011.342417] env[68194]: DEBUG nova.compute.manager [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Instance network_info: |[{"id": "7ef885d3-f2f5-48d9-813b-dd09f72a5071", "address": "fa:16:3e:a5:9d:db", "network": {"id": "e79e376d-55f2-4078-b6ff-dfe489f79178", "bridge": "br-int", "label": "tempest-ServersTestJSON-366329787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b53572aed8d9403a8c3c5abf3f070588", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ef885d3-f2", "ovs_interfaceid": "7ef885d3-f2f5-48d9-813b-dd09f72a5071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2011.342803] env[68194]: 
DEBUG nova.virt.vmwareapi.vmops [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:a5:9d:db', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89f807d9-140f-4a6f-8bce-96795f9482ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '7ef885d3-f2f5-48d9-813b-dd09f72a5071', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2011.350210] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Creating folder: Project (b53572aed8d9403a8c3c5abf3f070588). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2011.350745] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-71731696-a6f0-4411-8b04-f174bfdbafba {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.361862] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Created folder: Project (b53572aed8d9403a8c3c5abf3f070588) in parent group-v692426. [ 2011.362059] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Creating folder: Instances. Parent ref: group-v692537. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2011.362278] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-864af339-9e3e-4054-a1c7-573e7e897ad9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.370205] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Created folder: Instances in parent group-v692537. [ 2011.370423] env[68194]: DEBUG oslo.service.loopingcall [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2011.370636] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2011.370830] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-e0d81cf5-4e11-4a03-bf77-66c4e527f1a1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.390062] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2011.390062] env[68194]: value = "task-3466953" [ 2011.390062] env[68194]: _type = "Task" [ 2011.390062] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.400682] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466953, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2011.900299] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466953, 'name': CreateVM_Task, 'duration_secs': 0.280815} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2011.900551] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2011.901186] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2011.901361] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2011.901736] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2011.902014] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-21e328e8-3ca9-47c8-8c88-a2c5be1203a6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2011.906711] env[68194]: DEBUG oslo_vmware.api [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Waiting for the task: (returnval){ [ 2011.906711] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5230f39a-cefc-efd7-58e8-6ef180709085" [ 2011.906711] env[68194]: _type = "Task" [ 2011.906711] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2011.917232] env[68194]: DEBUG oslo_vmware.api [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5230f39a-cefc-efd7-58e8-6ef180709085, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2012.416716] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2012.417116] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2012.417213] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2013.068271] env[68194]: DEBUG nova.compute.manager [req-f4681754-258d-404c-afa2-0c2b0a6abd90 req-8c1ce833-8c12-4eba-981c-df277d5ca05f service nova] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Received event network-changed-7ef885d3-f2f5-48d9-813b-dd09f72a5071 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2013.068271] env[68194]: DEBUG nova.compute.manager [req-f4681754-258d-404c-afa2-0c2b0a6abd90 req-8c1ce833-8c12-4eba-981c-df277d5ca05f service nova] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Refreshing instance network info cache due to event network-changed-7ef885d3-f2f5-48d9-813b-dd09f72a5071. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2013.068271] env[68194]: DEBUG oslo_concurrency.lockutils [req-f4681754-258d-404c-afa2-0c2b0a6abd90 req-8c1ce833-8c12-4eba-981c-df277d5ca05f service nova] Acquiring lock "refresh_cache-073be13d-9a6d-4cfc-997b-f6b61710790a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2013.068394] env[68194]: DEBUG oslo_concurrency.lockutils [req-f4681754-258d-404c-afa2-0c2b0a6abd90 req-8c1ce833-8c12-4eba-981c-df277d5ca05f service nova] Acquired lock "refresh_cache-073be13d-9a6d-4cfc-997b-f6b61710790a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2013.068523] env[68194]: DEBUG nova.network.neutron [req-f4681754-258d-404c-afa2-0c2b0a6abd90 req-8c1ce833-8c12-4eba-981c-df277d5ca05f service nova] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Refreshing network info cache for port 7ef885d3-f2f5-48d9-813b-dd09f72a5071 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2013.297357] env[68194]: DEBUG nova.network.neutron [req-f4681754-258d-404c-afa2-0c2b0a6abd90 req-8c1ce833-8c12-4eba-981c-df277d5ca05f service nova] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Updated VIF entry in instance network info cache for port 7ef885d3-f2f5-48d9-813b-dd09f72a5071. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2013.297724] env[68194]: DEBUG nova.network.neutron [req-f4681754-258d-404c-afa2-0c2b0a6abd90 req-8c1ce833-8c12-4eba-981c-df277d5ca05f service nova] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Updating instance_info_cache with network_info: [{"id": "7ef885d3-f2f5-48d9-813b-dd09f72a5071", "address": "fa:16:3e:a5:9d:db", "network": {"id": "e79e376d-55f2-4078-b6ff-dfe489f79178", "bridge": "br-int", "label": "tempest-ServersTestJSON-366329787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.3", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b53572aed8d9403a8c3c5abf3f070588", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap7ef885d3-f2", "ovs_interfaceid": "7ef885d3-f2f5-48d9-813b-dd09f72a5071", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2013.306352] env[68194]: DEBUG oslo_concurrency.lockutils [req-f4681754-258d-404c-afa2-0c2b0a6abd90 req-8c1ce833-8c12-4eba-981c-df277d5ca05f service nova] Releasing lock "refresh_cache-073be13d-9a6d-4cfc-997b-f6b61710790a" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2026.774511] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.774852] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2026.776025] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2027.412406] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2028.416666] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2028.417089] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2029.417161] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2029.417550] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2029.417550] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2029.436392] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2029.436594] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2029.436733] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2029.436859] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2029.436982] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2029.437124] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2029.437247] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2029.437365] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2029.437482] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2029.437600] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2029.438103] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2030.350280] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2031.416587] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2031.416913] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2032.415952] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2032.426942] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2032.427275] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2032.427348] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2032.427504] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2032.428969] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cc9041da-e776-417f-b186-c8ad9d928551 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.438226] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78beba61-f0c6-452b-ac75-83e707d8cea8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.452114] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0e5967d5-123c-43c1-81b6-4a49bcecf6f6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.459018] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c9ee5b00-a6e1-4fd5-9c38-003c8ce27b15 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.487046] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180956MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2032.487202] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2032.487426] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2032.560134] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 2b833505-f170-46ea-8d14-c449f88a7d4c actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 
'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2032.560310] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2032.560441] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4bcfda9d-e14b-441c-aebb-498dbc10513e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2032.560566] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2032.560686] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2032.560805] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2032.560921] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2032.561109] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2032.561235] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 073be13d-9a6d-4cfc-997b-f6b61710790a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2032.561422] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 9 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2032.561560] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1664MB phys_disk=200GB used_disk=9GB total_vcpus=48 used_vcpus=9 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2032.664589] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9d9c8831-2ca1-4de5-a316-71cc366e03bf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.672270] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d13acc07-0fd8-42a7-951d-adcb2629f241 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.702049] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1190fc09-ed16-463b-82ca-0800e1046fb3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.708584] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1f1fe335-4635-4a3e-8cf2-c42dec3896e4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2032.721270] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2032.729080] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2032.741960] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2032.742214] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.255s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2038.722954] env[68194]: WARNING oslo_vmware.rw_handles [None 
req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2038.722954] env[68194]: ERROR oslo_vmware.rw_handles [ 2038.723754] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/05bf8059-20be-497d-84b0-270799d76e01/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2038.725488] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2038.725734] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Copying Virtual Disk [datastore1] vmware_temp/05bf8059-20be-497d-84b0-270799d76e01/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/05bf8059-20be-497d-84b0-270799d76e01/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2038.726025] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-8be64851-00f4-47fc-a766-3de1a0c6780e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2038.734031] env[68194]: DEBUG oslo_vmware.api [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 2038.734031] env[68194]: value = "task-3466954" [ 2038.734031] env[68194]: _type = "Task" [ 2038.734031] 
env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2038.741302] env[68194]: DEBUG oslo_vmware.api [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': task-3466954, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.244347] env[68194]: DEBUG oslo_vmware.exceptions [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2039.244573] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2039.245115] env[68194]: ERROR nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2039.245115] env[68194]: Faults: ['InvalidArgument'] [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Traceback (most recent call last): [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] yield resources [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] self.driver.spawn(context, instance, image_meta, [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] self._fetch_image_if_missing(context, vi) [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] image_cache(vi, tmp_image_ds_loc) [ 2039.245115] env[68194]: ERROR 
nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] vm_util.copy_virtual_disk( [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] session._wait_for_task(vmdk_copy_task) [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] return self.wait_for_task(task_ref) [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] return evt.wait() [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] result = hub.switch() [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] return self.greenlet.switch() [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] self.f(*self.args, **self.kw) [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] raise exceptions.translate_fault(task_info.error) [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Faults: ['InvalidArgument'] [ 2039.245115] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] [ 2039.246218] env[68194]: INFO nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Terminating instance [ 2039.246925] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock 
"[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2039.247148] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2039.247377] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c0b06ec0-3748-4490-98ca-acb0c42080fc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.249430] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2039.249620] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2039.250335] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4e74db43-ff97-4f15-83f0-62b2e8d32c79 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.257326] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2039.257550] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-c86b0842-a833-4e86-b3f5-0270d0cb8ddc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.259681] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2039.259859] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2039.260811] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a2be7c86-d0bf-4e03-b9b9-dbad5ee8cf73 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.265379] env[68194]: DEBUG oslo_vmware.api [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 2039.265379] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52b1fe48-8bf4-3669-d6b9-a4a9e703e9df" [ 2039.265379] env[68194]: _type = "Task" [ 2039.265379] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.272622] env[68194]: DEBUG oslo_vmware.api [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52b1fe48-8bf4-3669-d6b9-a4a9e703e9df, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.330418] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2039.330631] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2039.330815] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Deleting the datastore file [datastore1] 2b833505-f170-46ea-8d14-c449f88a7d4c {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2039.331104] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-8ee0c0d9-ea8f-4002-a3cb-fd30e7b73a72 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.336860] env[68194]: DEBUG oslo_vmware.api [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 2039.336860] env[68194]: value = "task-3466956" [ 2039.336860] env[68194]: _type = "Task" [ 2039.336860] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2039.344394] env[68194]: DEBUG oslo_vmware.api [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': task-3466956, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2039.774886] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2039.775311] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating directory with path [datastore1] vmware_temp/a3ecaf2c-d27b-402a-b46b-12823731d89c/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2039.775393] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-c4253ab5-4b32-45bc-9903-4bca3e30a4a6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.787491] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Created directory with path [datastore1] vmware_temp/a3ecaf2c-d27b-402a-b46b-12823731d89c/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2039.787676] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Fetch image to [datastore1] vmware_temp/a3ecaf2c-d27b-402a-b46b-12823731d89c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2039.787848] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/a3ecaf2c-d27b-402a-b46b-12823731d89c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2039.788591] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5e49b88d-e200-4b0c-9e9e-52f6083aa92b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.794906] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b710770d-5cb2-48c8-809c-3346c5dc58f4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.804245] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-485c36cf-88fe-4525-a7e4-c20d7a6e6d83 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.833792] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-29808a54-2883-40d9-af16-b96409fae72b 
{{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.841805] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-c55b507f-5ffc-48d7-95eb-4b028571653c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2039.845983] env[68194]: DEBUG oslo_vmware.api [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': task-3466956, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.067172} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2039.846537] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2039.846733] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2039.846907] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2039.847103] env[68194]: INFO nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2039.849245] env[68194]: DEBUG nova.compute.claims [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2039.849415] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2039.849628] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2039.865588] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2039.918150] env[68194]: DEBUG oslo_vmware.rw_handles [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a3ecaf2c-d27b-402a-b46b-12823731d89c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2039.979243] env[68194]: DEBUG oslo_vmware.rw_handles [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2039.979445] env[68194]: DEBUG oslo_vmware.rw_handles [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/a3ecaf2c-d27b-402a-b46b-12823731d89c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2040.052632] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e2b6cd55-3cea-40d7-b496-07979cc98887 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.059757] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6660fcb4-6c53-4a4c-a6cc-28b4b70a2abe {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.090641] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d8959bf4-3d9e-46b8-9a00-01d879639883 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.097800] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac159f4-d6f2-4f83-a906-0d2568907aeb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.110627] env[68194]: DEBUG nova.compute.provider_tree [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2040.118490] env[68194]: DEBUG nova.scheduler.client.report [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2040.132763] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.283s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2040.133314] env[68194]: ERROR nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2040.133314] env[68194]: Faults: ['InvalidArgument'] [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Traceback (most recent call last): [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2040.133314] 
env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] self.driver.spawn(context, instance, image_meta, [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] self._fetch_image_if_missing(context, vi) [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] image_cache(vi, tmp_image_ds_loc) [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] vm_util.copy_virtual_disk( [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] session._wait_for_task(vmdk_copy_task) [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] return self.wait_for_task(task_ref) [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] return evt.wait() [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] result = hub.switch() [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] return self.greenlet.switch() [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] self.f(*self.args, **self.kw) [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] raise exceptions.translate_fault(task_info.error) [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Faults: ['InvalidArgument'] [ 2040.133314] env[68194]: ERROR nova.compute.manager [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] [ 2040.134215] env[68194]: DEBUG nova.compute.utils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2040.135760] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Build of instance 2b833505-f170-46ea-8d14-c449f88a7d4c was re-scheduled: A specified parameter was not correct: fileType [ 2040.135760] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2040.136140] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2040.136324] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2040.136495] env[68194]: DEBUG nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2040.136657] env[68194]: DEBUG nova.network.neutron [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2040.446244] env[68194]: DEBUG nova.network.neutron [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.460532] env[68194]: INFO nova.compute.manager [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Took 0.32 seconds to deallocate network for instance. [ 2040.553893] env[68194]: INFO nova.scheduler.client.report [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Deleted allocations for instance 2b833505-f170-46ea-8d14-c449f88a7d4c [ 2040.574375] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1ca15746-c405-4ac5-99c9-8c3aef07d0bd tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 599.560s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2040.574637] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 403.270s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2040.574858] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "2b833505-f170-46ea-8d14-c449f88a7d4c-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2040.575077] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2040.575253] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2040.577493] env[68194]: INFO nova.compute.manager [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Terminating instance [ 2040.579201] env[68194]: DEBUG nova.compute.manager [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2040.579385] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2040.579856] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-566e8e4c-0b05-4285-9fe0-e96655b8daaf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.589719] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-45e25e09-6b99-47e0-bf6d-2e312c1cbd78 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2040.619495] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 2b833505-f170-46ea-8d14-c449f88a7d4c could not be found. [ 2040.619704] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2040.619887] env[68194]: INFO nova.compute.manager [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2040.620151] env[68194]: DEBUG oslo.service.loopingcall [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2040.620379] env[68194]: DEBUG nova.compute.manager [-] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2040.620473] env[68194]: DEBUG nova.network.neutron [-] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2040.656602] env[68194]: DEBUG nova.network.neutron [-] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2040.665041] env[68194]: INFO nova.compute.manager [-] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] Took 0.04 seconds to deallocate network for instance. [ 2040.753012] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ae8fe200-c95b-4a4a-b757-dc87c8bef00a tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.178s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2040.754339] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 209.282s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2040.754339] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 2b833505-f170-46ea-8d14-c449f88a7d4c] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2040.754339] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "2b833505-f170-46ea-8d14-c449f88a7d4c" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2046.210439] env[68194]: DEBUG oslo_concurrency.lockutils [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "5fb0537f-884d-421c-9f47-ec8fd7236e54" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2062.182988] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "c2c4ea7a-0be6-48ec-af75-712929e48a5b" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2062.183337] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "c2c4ea7a-0be6-48ec-af75-712929e48a5b" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2062.440525] env[68194]: DEBUG nova.compute.manager [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2062.523974] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2062.524264] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2062.525738] env[68194]: INFO nova.compute.claims [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2062.682071] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b8a799e8-8df5-46ed-a510-725f5119e096 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.689674] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a3d83c46-4250-49df-a25e-ab9d33aff54d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.720612] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-afc0d58c-d7f0-4011-9adf-11937c75b2d1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.728836] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e4cfe9bf-88e8-45c5-9416-bb735ade3e89 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.741736] env[68194]: DEBUG nova.compute.provider_tree [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2062.750648] env[68194]: DEBUG nova.scheduler.client.report [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2062.772267] env[68194]: DEBUG oslo_concurrency.lockutils 
[None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.248s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2062.772458] env[68194]: DEBUG nova.compute.manager [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2062.808179] env[68194]: DEBUG nova.compute.utils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2062.809508] env[68194]: DEBUG nova.compute.manager [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2062.809704] env[68194]: DEBUG nova.network.neutron [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2062.818111] env[68194]: DEBUG nova.compute.manager [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2062.882787] env[68194]: DEBUG nova.compute.manager [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2062.900975] env[68194]: DEBUG nova.policy [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '3ee71b35a8e64d6d8fb2a7da304db996', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'e921c94dac1c4681afddfdf2902d672a', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 2062.928841] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2062.929089] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2062.929254] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2062.929435] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2062.929595] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2062.929775] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2062.930015] env[68194]: 
DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2062.930244] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2062.930426] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2062.930593] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2062.930768] env[68194]: DEBUG nova.virt.hardware [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2062.931647] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47747735-6a25-49fc-997f-c7be1f4b78d1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2062.941614] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2134fc6e-fc1e-4c5a-9257-6bbced47f803 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2063.245933] env[68194]: DEBUG nova.network.neutron [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Successfully created port: 75b8906c-2dff-439d-b455-1d1f7079813d {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2063.964030] env[68194]: DEBUG nova.network.neutron [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Successfully updated port: 75b8906c-2dff-439d-b455-1d1f7079813d {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2063.991992] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "refresh_cache-c2c4ea7a-0be6-48ec-af75-712929e48a5b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2063.992160] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquired lock "refresh_cache-c2c4ea7a-0be6-48ec-af75-712929e48a5b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2063.992314] env[68194]: DEBUG nova.network.neutron [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2064.031464] env[68194]: DEBUG nova.network.neutron [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Instance cache missing network info. {{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2064.105588] env[68194]: DEBUG nova.compute.manager [req-68f74bca-0b30-4475-9ebd-4a591f2af776 req-394620b0-dc76-4474-89b0-661b7153549d service nova] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Received event network-vif-plugged-75b8906c-2dff-439d-b455-1d1f7079813d {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2064.105846] env[68194]: DEBUG oslo_concurrency.lockutils [req-68f74bca-0b30-4475-9ebd-4a591f2af776 req-394620b0-dc76-4474-89b0-661b7153549d service nova] Acquiring lock "c2c4ea7a-0be6-48ec-af75-712929e48a5b-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2064.106107] env[68194]: DEBUG oslo_concurrency.lockutils [req-68f74bca-0b30-4475-9ebd-4a591f2af776 req-394620b0-dc76-4474-89b0-661b7153549d service nova] Lock "c2c4ea7a-0be6-48ec-af75-712929e48a5b-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2064.106204] env[68194]: DEBUG oslo_concurrency.lockutils [req-68f74bca-0b30-4475-9ebd-4a591f2af776 req-394620b0-dc76-4474-89b0-661b7153549d service nova] Lock "c2c4ea7a-0be6-48ec-af75-712929e48a5b-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2064.106384] env[68194]: DEBUG nova.compute.manager [req-68f74bca-0b30-4475-9ebd-4a591f2af776 req-394620b0-dc76-4474-89b0-661b7153549d service nova] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] No waiting events found dispatching network-vif-plugged-75b8906c-2dff-439d-b455-1d1f7079813d {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2064.106619] env[68194]: WARNING nova.compute.manager [req-68f74bca-0b30-4475-9ebd-4a591f2af776 req-394620b0-dc76-4474-89b0-661b7153549d service nova] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Received unexpected event network-vif-plugged-75b8906c-2dff-439d-b455-1d1f7079813d for instance with vm_state building and task_state spawning. 
[ 2064.193027] env[68194]: DEBUG nova.network.neutron [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Updating instance_info_cache with network_info: [{"id": "75b8906c-2dff-439d-b455-1d1f7079813d", "address": "fa:16:3e:50:c8:65", "network": {"id": "e47632ac-218f-49b6-baca-b148ca44ea7b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1967602305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e921c94dac1c4681afddfdf2902d672a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75b8906c-2d", "ovs_interfaceid": "75b8906c-2dff-439d-b455-1d1f7079813d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2064.210569] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Releasing lock "refresh_cache-c2c4ea7a-0be6-48ec-af75-712929e48a5b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2064.210868] env[68194]: DEBUG nova.compute.manager [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Instance network_info: |[{"id": "75b8906c-2dff-439d-b455-1d1f7079813d", "address": "fa:16:3e:50:c8:65", "network": {"id": "e47632ac-218f-49b6-baca-b148ca44ea7b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1967602305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e921c94dac1c4681afddfdf2902d672a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75b8906c-2d", "ovs_interfaceid": "75b8906c-2dff-439d-b455-1d1f7079813d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2064.211268] env[68194]: DEBUG 
nova.virt.vmwareapi.vmops [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:50:c8:65', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '75b8906c-2dff-439d-b455-1d1f7079813d', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2064.218925] env[68194]: DEBUG oslo.service.loopingcall [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2064.219394] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2064.219624] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b5d8457e-03e4-4fce-a68a-3b3fe5239de7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.239791] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2064.239791] env[68194]: value = "task-3466957" [ 2064.239791] env[68194]: _type = "Task" [ 2064.239791] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.247217] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466957, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2064.749486] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466957, 'name': CreateVM_Task, 'duration_secs': 0.362548} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2064.749723] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2064.750339] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2064.750501] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2064.750819] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2064.751077] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5de165cb-3b0b-41ca-a350-9bdb2f59ad7a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2064.755222] env[68194]: DEBUG oslo_vmware.api [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Waiting for the task: (returnval){ [ 2064.755222] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]526a03a5-b91c-51bc-83f5-240810bde3b1" [ 2064.755222] env[68194]: _type = "Task" [ 2064.755222] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2064.762256] env[68194]: DEBUG oslo_vmware.api [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]526a03a5-b91c-51bc-83f5-240810bde3b1, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2065.266252] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2065.266560] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2065.266715] env[68194]: DEBUG oslo_concurrency.lockutils [None req-aea74ed8-8b2a-44c6-8ce9-c723df760ac6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2066.130168] env[68194]: DEBUG nova.compute.manager [req-aae70c18-9861-4a87-9d38-07f68a16c425 req-8b9770fc-1ba4-40c6-8c66-e58f4b514bf1 service nova] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Received event network-changed-75b8906c-2dff-439d-b455-1d1f7079813d {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2066.130320] env[68194]: DEBUG nova.compute.manager [req-aae70c18-9861-4a87-9d38-07f68a16c425 req-8b9770fc-1ba4-40c6-8c66-e58f4b514bf1 service nova] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Refreshing instance network info cache due to event network-changed-75b8906c-2dff-439d-b455-1d1f7079813d. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2066.130541] env[68194]: DEBUG oslo_concurrency.lockutils [req-aae70c18-9861-4a87-9d38-07f68a16c425 req-8b9770fc-1ba4-40c6-8c66-e58f4b514bf1 service nova] Acquiring lock "refresh_cache-c2c4ea7a-0be6-48ec-af75-712929e48a5b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2066.130685] env[68194]: DEBUG oslo_concurrency.lockutils [req-aae70c18-9861-4a87-9d38-07f68a16c425 req-8b9770fc-1ba4-40c6-8c66-e58f4b514bf1 service nova] Acquired lock "refresh_cache-c2c4ea7a-0be6-48ec-af75-712929e48a5b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2066.130850] env[68194]: DEBUG nova.network.neutron [req-aae70c18-9861-4a87-9d38-07f68a16c425 req-8b9770fc-1ba4-40c6-8c66-e58f4b514bf1 service nova] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Refreshing network info cache for port 75b8906c-2dff-439d-b455-1d1f7079813d {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2066.527475] env[68194]: DEBUG nova.network.neutron [req-aae70c18-9861-4a87-9d38-07f68a16c425 req-8b9770fc-1ba4-40c6-8c66-e58f4b514bf1 service nova] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Updated VIF entry in instance network info cache for port 75b8906c-2dff-439d-b455-1d1f7079813d. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2066.527842] env[68194]: DEBUG nova.network.neutron [req-aae70c18-9861-4a87-9d38-07f68a16c425 req-8b9770fc-1ba4-40c6-8c66-e58f4b514bf1 service nova] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Updating instance_info_cache with network_info: [{"id": "75b8906c-2dff-439d-b455-1d1f7079813d", "address": "fa:16:3e:50:c8:65", "network": {"id": "e47632ac-218f-49b6-baca-b148ca44ea7b", "bridge": "br-int", "label": "tempest-AttachVolumeNegativeTest-1967602305-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.10", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "e921c94dac1c4681afddfdf2902d672a", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "0ed262a3-f84f-4e1c-bbaf-c6a10e0c243e", "external-id": "nsx-vlan-transportzone-146", "segmentation_id": 146, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap75b8906c-2d", "ovs_interfaceid": "75b8906c-2dff-439d-b455-1d1f7079813d", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2066.536944] env[68194]: DEBUG oslo_concurrency.lockutils [req-aae70c18-9861-4a87-9d38-07f68a16c425 req-8b9770fc-1ba4-40c6-8c66-e58f4b514bf1 service nova] Releasing lock "refresh_cache-c2c4ea7a-0be6-48ec-af75-712929e48a5b" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2085.839188] env[68194]: WARNING oslo_vmware.rw_handles [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2085.839188] env[68194]: ERROR oslo_vmware.rw_handles [ 2085.839828] env[68194]: DEBUG nova.virt.vmwareapi.images [None 
req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/a3ecaf2c-d27b-402a-b46b-12823731d89c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2085.841691] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2085.841924] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Copying Virtual Disk [datastore1] vmware_temp/a3ecaf2c-d27b-402a-b46b-12823731d89c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/a3ecaf2c-d27b-402a-b46b-12823731d89c/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2085.842251] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-b42cc7ca-27d5-4ed1-8fca-3e1dba561828 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2085.850426] env[68194]: DEBUG oslo_vmware.api [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 2085.850426] env[68194]: value = "task-3466958" [ 2085.850426] env[68194]: _type = "Task" [ 2085.850426] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2085.858584] env[68194]: DEBUG oslo_vmware.api [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': task-3466958, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.360834] env[68194]: DEBUG oslo_vmware.exceptions [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2086.360834] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2086.362658] env[68194]: ERROR nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2086.362658] env[68194]: Faults: ['InvalidArgument'] [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Traceback (most recent call last): [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] yield resources [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] self.driver.spawn(context, instance, image_meta, [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] self._fetch_image_if_missing(context, vi) [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] image_cache(vi, tmp_image_ds_loc) [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] vm_util.copy_virtual_disk( [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] session._wait_for_task(vmdk_copy_task) [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] return self.wait_for_task(task_ref) [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] return evt.wait() [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] result = hub.switch() [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] return self.greenlet.switch() [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] self.f(*self.args, **self.kw) [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] raise exceptions.translate_fault(task_info.error) [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Faults: ['InvalidArgument'] [ 2086.362658] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] [ 2086.362658] env[68194]: INFO nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Terminating instance [ 2086.363977] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2086.363977] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2086.363977] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-53357e65-63b6-404e-bd98-b3d3aec6d873 {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.365998] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2086.366202] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2086.366932] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fa211b62-3ebd-4e9b-ab25-913f9539d697 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.374933] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2086.375156] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-2b8f5792-afe7-4212-bc5d-010e6a595b28 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.377275] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2086.377450] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2086.378348] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-f1cc1478-c4bb-42d0-a9f8-df047f26c7a6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.382828] env[68194]: DEBUG oslo_vmware.api [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for the task: (returnval){ [ 2086.382828] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5291a306-d7c6-260f-8aba-b06cfdc32327" [ 2086.382828] env[68194]: _type = "Task" [ 2086.382828] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.395430] env[68194]: DEBUG oslo_vmware.api [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5291a306-d7c6-260f-8aba-b06cfdc32327, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.415927] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2086.416083] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2086.416281] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2086.447598] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2086.447824] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2086.447983] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Deleting the datastore file [datastore1] d5bc98e3-9621-41bb-90a3-2f8e80c6928b {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2086.448263] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-37e0bc3e-2bbd-4503-8886-90b32f59a1fd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.455581] env[68194]: DEBUG oslo_vmware.api [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 2086.455581] env[68194]: value = "task-3466960" [ 2086.455581] env[68194]: _type = "Task" [ 2086.455581] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2086.464459] env[68194]: DEBUG oslo_vmware.api [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': task-3466960, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2086.892770] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2086.893120] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Creating directory with path [datastore1] vmware_temp/2be37868-69f6-4a44-9759-467724f976fd/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2086.893340] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a91f810a-c8a0-410f-9aa4-304d208df94c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.904668] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Created directory with path [datastore1] vmware_temp/2be37868-69f6-4a44-9759-467724f976fd/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2086.904668] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Fetch image to [datastore1] vmware_temp/2be37868-69f6-4a44-9759-467724f976fd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2086.904668] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/2be37868-69f6-4a44-9759-467724f976fd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2086.909018] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-65d452a0-c7e0-4910-a516-37e9b9934cf4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.912385] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ca01286d-ad08-4f0e-8729-a102022211f4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.921456] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9f8f1441-866e-47a9-9030-ebed89650425 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.952496] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0449259e-a503-4a9a-bc4f-71b20fbe6371 {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.961314] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-64b116d1-dbb2-4c44-8f7a-858bf214abc1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2086.965679] env[68194]: DEBUG oslo_vmware.api [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': task-3466960, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.071407} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2086.966205] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2086.966406] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2086.966580] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2086.966750] env[68194]: INFO nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2086.968900] env[68194]: DEBUG nova.compute.claims [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2086.969095] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2086.969309] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2086.984177] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2087.040606] env[68194]: DEBUG oslo_vmware.rw_handles [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2be37868-69f6-4a44-9759-467724f976fd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2087.102192] env[68194]: DEBUG oslo_vmware.rw_handles [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2087.102399] env[68194]: DEBUG oslo_vmware.rw_handles [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Closing write handle for https://esx7c1n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/2be37868-69f6-4a44-9759-467724f976fd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2087.182059] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-261b4af6-4f32-42df-a668-d64d001d4300 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.189890] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5b482015-4a4e-4114-bf33-37b52a3470e4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.220874] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5fc5e1ee-9e86-4e38-b605-9bc52e00ceb4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.228581] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d2482603-d6a9-43a4-9710-960fbbdfd1cd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.241687] env[68194]: DEBUG nova.compute.provider_tree [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2087.252427] env[68194]: DEBUG nova.scheduler.client.report [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2087.268392] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.299s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2087.268945] env[68194]: ERROR nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2087.268945] env[68194]: Faults: ['InvalidArgument'] [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Traceback (most recent call last): [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2087.268945] env[68194]: ERROR 
nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] self.driver.spawn(context, instance, image_meta, [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] self._fetch_image_if_missing(context, vi) [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] image_cache(vi, tmp_image_ds_loc) [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] vm_util.copy_virtual_disk( [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] session._wait_for_task(vmdk_copy_task) [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] return self.wait_for_task(task_ref) [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] return evt.wait() [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] result = hub.switch() [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] return self.greenlet.switch() [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] self.f(*self.args, **self.kw) [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] raise exceptions.translate_fault(task_info.error) [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Faults: ['InvalidArgument'] [ 2087.268945] env[68194]: ERROR nova.compute.manager [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] [ 2087.269843] env[68194]: DEBUG nova.compute.utils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2087.271118] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Build of instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b was re-scheduled: A specified parameter was not correct: fileType [ 2087.271118] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2087.271487] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2087.271666] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2087.271839] env[68194]: DEBUG nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2087.272010] env[68194]: DEBUG nova.network.neutron [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2087.573704] env[68194]: DEBUG nova.network.neutron [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2087.585915] env[68194]: INFO nova.compute.manager [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Took 0.31 seconds to deallocate network for instance. [ 2087.682027] env[68194]: INFO nova.scheduler.client.report [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Deleted allocations for instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b [ 2087.700114] env[68194]: DEBUG oslo_concurrency.lockutils [None req-95d4dac4-b7b5-4220-ab59-5fb4ee8f0f55 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 631.355s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2087.701859] env[68194]: DEBUG oslo_concurrency.lockutils [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 435.070s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2087.701859] env[68194]: DEBUG oslo_concurrency.lockutils [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2087.701859] env[68194]: DEBUG oslo_concurrency.lockutils [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2087.701859] env[68194]: DEBUG oslo_concurrency.lockutils [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2087.704241] env[68194]: INFO nova.compute.manager [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Terminating instance [ 2087.705988] env[68194]: DEBUG nova.compute.manager [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2087.706836] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2087.706836] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-9814d91f-32b7-4eab-b329-e3450e405641 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.715412] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0700a576-db7b-4d33-86bb-096f1fe9cb6c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2087.744218] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance d5bc98e3-9621-41bb-90a3-2f8e80c6928b could not be found. [ 2087.744450] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2087.744634] env[68194]: INFO nova.compute.manager [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2087.744873] env[68194]: DEBUG oslo.service.loopingcall [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2087.745116] env[68194]: DEBUG nova.compute.manager [-] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2087.745214] env[68194]: DEBUG nova.network.neutron [-] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2087.768917] env[68194]: DEBUG nova.network.neutron [-] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2087.776663] env[68194]: INFO nova.compute.manager [-] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] Took 0.03 seconds to deallocate network for instance. [ 2087.860988] env[68194]: DEBUG oslo_concurrency.lockutils [None req-be6d9d36-064e-48ef-99c3-323bc027844f tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.160s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2087.862035] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 256.389s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2087.862035] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: d5bc98e3-9621-41bb-90a3-2f8e80c6928b] During sync_power_state the instance has a pending task (deleting). Skip. 
[ 2087.862258] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "d5bc98e3-9621-41bb-90a3-2f8e80c6928b" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2088.423646] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2088.423958] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.416365] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.416654] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2090.416696] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2090.434650] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2090.434810] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2090.434945] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2090.435095] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2090.435233] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2090.435352] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2090.435474] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2090.435595] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2090.435717] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2090.436250] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2090.436428] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2091.416949] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2092.416136] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2092.428023] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2092.428023] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2092.428023] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2092.428023] env[68194]: DEBUG 
nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2092.428648] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ec619ce7-7bdc-482b-8374-7764e9a67717 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.437872] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6202c1e6-686f-457e-b9f1-93c73d71d31f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.452058] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7481d6d3-b390-45de-887c-56db506ab80f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.458598] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8f08bed4-c48b-4ae1-839c-89a5fc548de5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.488268] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180968MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2092.488452] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2092.488597] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 4bcfda9d-e14b-441c-aebb-498dbc10513e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 073be13d-9a6d-4cfc-997b-f6b61710790a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance c2c4ea7a-0be6-48ec-af75-712929e48a5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 8 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2092.586564] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1536MB phys_disk=200GB used_disk=8GB total_vcpus=48 used_vcpus=8 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2092.604872] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing inventories for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2092.616640] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating ProviderTree inventory for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2092.616817] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating inventory in ProviderTree for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2092.626506] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing aggregate associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, aggregates: None {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2092.644192] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing trait associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2092.731617] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a975843e-3952-4612-8110-3009b00e64b7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.739046] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0bba3cf1-0976-40c9-8b59-4f60a4164f51 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.767609] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-942e419f-11cf-4203-9224-5bbcd009ab90 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.774261] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e51cb09-dd23-4126-bdb2-78760e918da4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2092.786788] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2092.794612] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2092.807375] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2092.807527] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.319s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2093.803716] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2097.416511] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2097.416745] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 2097.425410] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] There are 0 instances to clean {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 2101.200842] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 
tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2105.416556] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_incomplete_migrations {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2105.416839] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances with incomplete migration {{(pid=68194) _cleanup_incomplete_migrations /opt/stack/nova/nova/compute/manager.py:11236}} [ 2133.125636] env[68194]: WARNING oslo_vmware.rw_handles [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2133.125636] env[68194]: ERROR oslo_vmware.rw_handles [ 2133.126561] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/2be37868-69f6-4a44-9759-467724f976fd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2133.128157] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2133.128413] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Copying Virtual Disk [datastore1] 
vmware_temp/2be37868-69f6-4a44-9759-467724f976fd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/2be37868-69f6-4a44-9759-467724f976fd/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2133.128706] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-82622f75-4bd0-48b5-bd95-5b4890648490 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.137666] env[68194]: DEBUG oslo_vmware.api [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for the task: (returnval){ [ 2133.137666] env[68194]: value = "task-3466961" [ 2133.137666] env[68194]: _type = "Task" [ 2133.137666] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.145699] env[68194]: DEBUG oslo_vmware.api [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': task-3466961, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.647879] env[68194]: DEBUG oslo_vmware.exceptions [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2133.648183] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2133.648719] env[68194]: ERROR nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2133.648719] env[68194]: Faults: ['InvalidArgument'] [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Traceback (most recent call last): [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] yield resources [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self.driver.spawn(context, instance, image_meta, [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File 
"/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._fetch_image_if_missing(context, vi) [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] image_cache(vi, tmp_image_ds_loc) [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] vm_util.copy_virtual_disk( [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] session._wait_for_task(vmdk_copy_task) [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.wait_for_task(task_ref) [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return evt.wait() [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] result = hub.switch() [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.greenlet.switch() [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self.f(*self.args, **self.kw) [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] raise exceptions.translate_fault(task_info.error) [ 
2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Faults: ['InvalidArgument'] [ 2133.648719] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2133.649820] env[68194]: INFO nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Terminating instance [ 2133.650540] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2133.650774] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2133.651015] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-054daf09-6451-4064-8681-6cb12c6b7997 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.653107] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2133.653300] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2133.654019] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecd5fca2-2f79-45ae-bbab-8e006d78c8a3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.660665] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2133.660886] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-44f51dbf-4e24-4106-84c9-dd6248198074 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.663008] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2133.663186] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2133.664123] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-283d3b9b-f5b0-40fa-9d5d-968431fc1674 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.668790] env[68194]: DEBUG oslo_vmware.api [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Waiting for the task: (returnval){ [ 2133.668790] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]525f7c75-b452-e523-694a-e1ca65a0650c" [ 2133.668790] env[68194]: _type = "Task" [ 2133.668790] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.677468] env[68194]: DEBUG oslo_vmware.api [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]525f7c75-b452-e523-694a-e1ca65a0650c, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2133.763777] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2133.764068] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2133.764217] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Deleting the datastore file [datastore1] 4bcfda9d-e14b-441c-aebb-498dbc10513e {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2133.764478] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-e57ddaf7-55df-4176-9e9b-dc377cfcfe7e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2133.770954] env[68194]: DEBUG oslo_vmware.api [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for the task: (returnval){ [ 2133.770954] env[68194]: value = "task-3466963" [ 2133.770954] env[68194]: _type = "Task" [ 2133.770954] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2133.780536] env[68194]: DEBUG oslo_vmware.api [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': task-3466963, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2134.179154] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2134.179467] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Creating directory with path [datastore1] vmware_temp/59815986-8a29-4fb8-848f-8dcf7f45d7bf/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2134.179657] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a434bd21-475a-440e-8431-17a4643ae8a0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.190744] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Created directory with path [datastore1] vmware_temp/59815986-8a29-4fb8-848f-8dcf7f45d7bf/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2134.190930] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Fetch image to [datastore1] vmware_temp/59815986-8a29-4fb8-848f-8dcf7f45d7bf/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2134.191105] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/59815986-8a29-4fb8-848f-8dcf7f45d7bf/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2134.191826] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-04476665-c064-4ac8-a87d-72d923a21f1e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.198162] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f2bda8aa-dacd-4adc-85eb-14d2cc187e75 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.207016] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1540bd5f-ff37-4189-906d-a881846399e7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.236575] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-3db295a3-767e-41d8-a7ec-3b77b680f890 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.241789] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-70786d7a-6360-4a7e-9beb-6c993e75d056 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.267894] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2134.279690] env[68194]: DEBUG oslo_vmware.api [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Task: {'id': task-3466963, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.103232} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2134.279926] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2134.280130] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2134.280332] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2134.280488] env[68194]: INFO nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Took 0.63 seconds to destroy the instance on the hypervisor. 
[ 2134.282576] env[68194]: DEBUG nova.compute.claims [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2134.282772] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2134.283048] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2134.316941] env[68194]: DEBUG oslo_vmware.rw_handles [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/59815986-8a29-4fb8-848f-8dcf7f45d7bf/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2134.376411] env[68194]: DEBUG oslo_vmware.rw_handles [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2134.376590] env[68194]: DEBUG oslo_vmware.rw_handles [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Closing write handle for https://esx7c2n2.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/59815986-8a29-4fb8-848f-8dcf7f45d7bf/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2134.475124] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ba88d9c9-c4cc-4f61-a5fb-5d9c9f162778 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.482483] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f9bdea55-8045-4f01-bd2d-136008f4e15e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.512646] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-4929acb9-af08-4b57-bd90-e5dcbfd23563 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.519374] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054874f8-b423-4754-be65-ce796e3795ef {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.532162] env[68194]: DEBUG nova.compute.provider_tree [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2134.540581] env[68194]: DEBUG nova.scheduler.client.report [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2134.556971] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.274s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2134.557512] env[68194]: ERROR nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2134.557512] env[68194]: Faults: ['InvalidArgument'] [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Traceback (most recent call last): [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2134.557512] env[68194]: ERROR nova.compute.manager 
[instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self.driver.spawn(context, instance, image_meta, [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._fetch_image_if_missing(context, vi) [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] image_cache(vi, tmp_image_ds_loc) [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] vm_util.copy_virtual_disk( [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] session._wait_for_task(vmdk_copy_task) [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.wait_for_task(task_ref) [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return evt.wait() [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] result = hub.switch() [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.greenlet.switch() [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self.f(*self.args, **self.kw) [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] raise exceptions.translate_fault(task_info.error) [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Faults: ['InvalidArgument'] [ 2134.557512] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.558534] env[68194]: DEBUG nova.compute.utils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2134.559617] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Build of instance 4bcfda9d-e14b-441c-aebb-498dbc10513e was re-scheduled: A specified parameter was not correct: fileType [ 2134.559617] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2134.560029] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2134.560215] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2134.560378] env[68194]: DEBUG nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2134.560545] env[68194]: DEBUG nova.network.neutron [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2134.655745] env[68194]: DEBUG neutronclient.v2_0.client [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2134.656803] env[68194]: ERROR nova.compute.manager [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Failed to deallocate networks: nova.exception.Unauthorized: Not authorized. [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Traceback (most recent call last): [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self.driver.spawn(context, instance, image_meta, [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._fetch_image_if_missing(context, vi) [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] image_cache(vi, tmp_image_ds_loc) [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] vm_util.copy_virtual_disk( [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in 
copy_virtual_disk [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] session._wait_for_task(vmdk_copy_task) [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.wait_for_task(task_ref) [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return evt.wait() [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] result = hub.switch() [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.greenlet.switch() [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self.f(*self.args, **self.kw) [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] raise exceptions.translate_fault(task_info.error) [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Faults: ['InvalidArgument'] [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] During handling of the above exception, another exception occurred: [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Traceback (most recent call last): [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 2430, in _do_build_and_run_instance [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._build_and_run_instance(context, instance, image, [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 2722, in _build_and_run_instance [ 
2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] raise exception.RescheduledException( [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] nova.exception.RescheduledException: Build of instance 4bcfda9d-e14b-441c-aebb-498dbc10513e was re-scheduled: A specified parameter was not correct: fileType [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Faults: ['InvalidArgument'] [ 2134.656803] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] During handling of the above exception, another exception occurred: [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Traceback (most recent call last): [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] exception_handler_v20(status_code, error_body) [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] raise client_exc(message=error_message, [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Neutron server returns request_ids: ['req-0dc9e6bb-9707-4187-a9a8-056dc520c0fe'] [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] During handling of the above exception, another exception occurred: [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Traceback (most recent call last): [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 3019, in _cleanup_allocated_networks [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._deallocate_network(context, instance, requested_networks) [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File 
"/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self.network_api.deallocate_for_instance( [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] data = neutron.list_ports(**search_opts) [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.list('ports', self.ports_path, retrieve_all, [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] for r in self._pagination(collection, path, **params): [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] res = self.get(path, params=params) [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.retry_request("GET", action, body=body, [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2134.658490] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return 
self.do_request(method, action, body=body, [ 2134.660154] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.660154] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.660154] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2134.660154] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._handle_fault_response(status_code, replybody, resp) [ 2134.660154] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 204, in wrapper [ 2134.660154] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] raise exception.Unauthorized() [ 2134.660154] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] nova.exception.Unauthorized: Not authorized. [ 2134.660154] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.706910] env[68194]: INFO nova.scheduler.client.report [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Deleted allocations for instance 4bcfda9d-e14b-441c-aebb-498dbc10513e [ 2134.725665] env[68194]: DEBUG oslo_concurrency.lockutils [None req-1b7a458a-5d29-4b03-a80c-363c0cc8e6fa tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 522.445s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2134.725922] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 326.731s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2134.726152] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Acquiring lock "4bcfda9d-e14b-441c-aebb-498dbc10513e-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2134.726353] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2134.726530] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 
tempest-ServersTestMultiNic-1453682960-project-member] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2134.728769] env[68194]: INFO nova.compute.manager [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Terminating instance [ 2134.730485] env[68194]: DEBUG nova.compute.manager [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2134.730713] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2134.731288] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-7175a6b4-67f3-4a79-8a52-66a6ec18fc4f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.740237] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-13f44bbb-be3e-4101-9378-1f836e2b4069 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2134.768167] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 4bcfda9d-e14b-441c-aebb-498dbc10513e could not be found. [ 2134.768167] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2134.768167] env[68194]: INFO nova.compute.manager [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2134.768167] env[68194]: DEBUG oslo.service.loopingcall [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2134.769123] env[68194]: DEBUG nova.compute.manager [-] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2134.769123] env[68194]: DEBUG nova.network.neutron [-] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2134.856229] env[68194]: DEBUG neutronclient.v2_0.client [-] Error message: {"error": {"code": 401, "title": "Unauthorized", "message": "The request you have made requires authentication."}} {{(pid=68194) _handle_fault_response /opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py:262}} [ 2134.856574] env[68194]: ERROR nova.network.neutron [-] Neutron client was not able to generate a valid admin token, please verify Neutron admin credential located in nova.conf: neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall [-] Dynamic interval looping call 'oslo_service.loopingcall.RetryDecorator.__call__.._func' failed: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall exception_handler_v20(status_code, error_body) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall raise client_exc(message=error_message, [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall Neutron server returns request_ids: ['req-d837da9e-14b6-465f-b7f7-fcfe0d38d592'] [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall During handling of the above exception, another exception occurred: [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall Traceback (most recent call last): [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall result = func(*self.args, **self.kw) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall result = f(*args, 
**kwargs) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall self._deallocate_network( [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall self.network_api.deallocate_for_instance( [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall data = neutron.list_ports(**search_opts) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall return self.list('ports', self.ports_path, retrieve_all, [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall for r in self._pagination(collection, path, **params): [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall res = self.get(path, params=params) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall return self.retry_request("GET", action, body=body, [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall return self.do_request(method, action, body=body, [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall ret = obj(*args, **kwargs) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall self._handle_fault_response(status_code, replybody, resp) [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall File 
"/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2134.857273] env[68194]: ERROR oslo.service.loopingcall [ 2134.858827] env[68194]: ERROR nova.compute.manager [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Failed to deallocate network for instance. Error: Networking client is experiencing an unauthorized exception.: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2134.894197] env[68194]: ERROR nova.compute.manager [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Setting instance vm_state to ERROR: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Traceback (most recent call last): [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] exception_handler_v20(status_code, error_body) [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] raise client_exc(message=error_message, [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Neutron server returns request_ids: ['req-d837da9e-14b6-465f-b7f7-fcfe0d38d592'] [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] During handling of the above exception, another exception occurred: [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Traceback (most recent call last): [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File 
"/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._delete_instance(context, instance, bdms) [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._shutdown_instance(context, instance, bdms) [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._try_deallocate_network(context, instance, requested_networks) [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] with excutils.save_and_reraise_exception(): [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self.force_reraise() [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] raise self.value [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] _deallocate_network_with_retries() [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return evt.wait() [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] result = hub.switch() [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.greenlet.switch() [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] result = 
func(*self.args, **self.kw) [ 2134.894197] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] result = f(*args, **kwargs) [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._deallocate_network( [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self.network_api.deallocate_for_instance( [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] data = neutron.list_ports(**search_opts) [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.list('ports', self.ports_path, retrieve_all, [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] for r in self._pagination(collection, path, **params): [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] res = self.get(path, params=params) [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 
356, in get [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.retry_request("GET", action, body=body, [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] return self.do_request(method, action, body=body, [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] ret = obj(*args, **kwargs) [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] self._handle_fault_response(status_code, replybody, resp) [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2134.895497] env[68194]: ERROR nova.compute.manager [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] [ 2134.922744] env[68194]: DEBUG oslo_concurrency.lockutils [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2134.924672] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 303.451s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2134.924672] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] During sync_power_state the instance has a pending task (deleting). Skip. 
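[editor's note] The tracebacks above all funnel through the same wrapper in nova/network/neutron.py (line 196 in this log): every neutronclient call is invoked through a shim that catches neutronclient's Unauthorized and re-raises a Nova exception, either Unauthorized for a bad user token or NeutronAdminCredentialConfigurationInvalid when the admin/service credentials themselves are rejected, which is exactly what produced the 401 entries here. The following is a minimal sketch of that translation pattern only; the class names, the is_admin_client flag, and the helper functions are illustrative assumptions, not Nova's actual code.

import functools

class Unauthorized(Exception):
    """Stand-in for nova.exception.Unauthorized (user token rejected)."""

class NeutronAdminCredentialConfigurationInvalid(Exception):
    """Stand-in for nova.exception.NeutronAdminCredentialConfigurationInvalid."""

class NeutronClientUnauthorized(Exception):
    """Stand-in for neutronclient.common.exceptions.Unauthorized (HTTP 401)."""

def translate_neutron_exceptions(func, *, is_admin_client):
    """Wrap a neutronclient call and re-raise 401s as Nova exceptions.

    A 401 on a user-token call means the caller's token is bad; a 401 on the
    admin/service client means the [neutron] credentials in nova.conf are
    wrong, which is the condition logged above during network deallocation.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except NeutronClientUnauthorized:
            if is_admin_client:
                raise NeutronAdminCredentialConfigurationInvalid()
            raise Unauthorized()
    return wrapper

def _list_ports_as_admin():
    # Simulate neutronclient rejecting the admin token with a 401.
    raise NeutronClientUnauthorized()

list_ports = translate_neutron_exceptions(_list_ports_as_admin, is_admin_client=True)
try:
    list_ports()
except NeutronAdminCredentialConfigurationInvalid:
    print("admin credentials in nova.conf [neutron] need fixing")

[end editor's note]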
[ 2134.924672] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "4bcfda9d-e14b-441c-aebb-498dbc10513e" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2134.973037] env[68194]: INFO nova.compute.manager [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] [instance: 4bcfda9d-e14b-441c-aebb-498dbc10513e] Successfully reverted task state from None on failure for instance. [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server [None req-c675abfe-a6b7-4c66-90d3-8db7ad0a125b tempest-ServersTestMultiNic-1453682960 tempest-ServersTestMultiNic-1453682960-project-member] Exception during message handling: nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 272, in _handle_fault_response [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server exception_handler_v20(status_code, error_body) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 90, in exception_handler_v20 [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server raise client_exc(message=error_message, [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server neutronclient.common.exceptions.Unauthorized: 401-{'error': {'code': 401, 'title': 'Unauthorized', 'message': 'The request you have made requires authentication.'}} [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server Neutron server returns request_ids: ['req-d837da9e-14b6-465f-b7f7-fcfe0d38d592'] [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server During handling of the above exception, another exception occurred: [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server Traceback (most recent call last): [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/server.py", line 165, in _process_incoming [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server res = self.dispatcher.dispatch(message) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 309, in dispatch [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server return self._do_dispatch(endpoint, method, ctxt, args) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_messaging/rpc/dispatcher.py", line 229, in _do_dispatch [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server result = func(ctxt, **new_args) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/nova/nova/exception_wrapper.py", line 65, in wrapped [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/exception_wrapper.py", line 63, in wrapped [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server return f(self, context, *args, **kw) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 166, in decorated_function [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 157, in decorated_function [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/utils.py", line 1439, in decorated_function [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 213, in decorated_function [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 203, in decorated_function [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server return function(self, context, *args, **kwargs) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3327, in terminate_instance [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server do_terminate_instance(instance, bdms) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py", line 414, in inner [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server return f(*args, 
**kwargs) [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3322, in do_terminate_instance [ 2134.975884] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3315, in do_terminate_instance [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server self._delete_instance(context, instance, bdms) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3250, in _delete_instance [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server self._shutdown_instance(context, instance, bdms) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3144, in _shutdown_instance [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server self._try_deallocate_network(context, instance, requested_networks) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3058, in _try_deallocate_network [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server with excutils.save_and_reraise_exception(): [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 227, in __exit__ [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server self.force_reraise() [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_utils/excutils.py", line 200, in force_reraise [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server raise self.value [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3056, in _try_deallocate_network [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server _deallocate_network_with_retries() [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 436, in func [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server return evt.wait() [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server result = hub.switch() [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server return self.greenlet.switch() [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 150, in _run_loop [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server result = func(*self.args, **self.kw) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py", line 407, in _func [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server result = f(*args, **kwargs) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 3045, in _deallocate_network_with_retries [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server self._deallocate_network( [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/compute/manager.py", line 2265, in _deallocate_network [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server self.network_api.deallocate_for_instance( [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 1806, in deallocate_for_instance [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server data = neutron.list_ports(**search_opts) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 815, in list_ports [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server return self.list('ports', self.ports_path, retrieve_all, [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 372, in list [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server for r in self._pagination(collection, path, **params): [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 387, in _pagination [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server res = self.get(path, params=params) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 356, in get [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server return self.retry_request("GET", action, body=body, [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 333, in retry_request [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server return self.do_request(method, action, body=body, [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 196, in wrapper [ 2134.977504] env[68194]: ERROR oslo_messaging.rpc.server ret = obj(*args, **kwargs) [ 2134.979109] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/data/venv/lib/python3.10/site-packages/neutronclient/v2_0/client.py", line 297, in do_request [ 2134.979109] 
env[68194]: ERROR oslo_messaging.rpc.server self._handle_fault_response(status_code, replybody, resp) [ 2134.979109] env[68194]: ERROR oslo_messaging.rpc.server File "/opt/stack/nova/nova/network/neutron.py", line 212, in wrapper [ 2134.979109] env[68194]: ERROR oslo_messaging.rpc.server raise exception.NeutronAdminCredentialConfigurationInvalid() [ 2134.979109] env[68194]: ERROR oslo_messaging.rpc.server nova.exception.NeutronAdminCredentialConfigurationInvalid: Networking client is experiencing an unauthorized exception. [ 2134.979109] env[68194]: ERROR oslo_messaging.rpc.server [ 2148.425395] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2148.425773] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2148.425826] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2150.417162] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2150.417564] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2150.417564] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2150.436950] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2150.437164] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2150.437283] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2150.437410] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2150.437533] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2150.437659] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2150.437780] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2150.437906] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2150.438389] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2151.417075] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2151.417075] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2152.412231] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2152.430181] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2153.416755] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2153.428741] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2153.428965] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock 
"compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2153.429137] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2153.429293] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2153.430446] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e0cd478f-3e19-4489-84af-24410c0dbca8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.439059] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3682a16e-370e-4e95-a43e-48d5366c5c72 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.452826] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-523a2c99-f845-46bb-b67e-5b697edaf96e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.458928] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07851df1-ea9b-4825-a6ee-a35313f703d9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.488569] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180934MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2153.488739] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2153.488901] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2153.548086] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2153.548255] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2153.548385] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2153.548506] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2153.548624] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2153.548742] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 073be13d-9a6d-4cfc-997b-f6b61710790a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2153.548860] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance c2c4ea7a-0be6-48ec-af75-712929e48a5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2153.549046] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2153.549186] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2153.628797] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-095b4635-6a76-464a-a690-db2b303ae5c4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.636384] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5911526b-66cc-48ba-97ec-1488c67557b4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.667364] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2dd25562-c43b-4c5b-8beb-13c9e7f29cc7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.674373] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-1c70bd33-f65e-4e5a-baca-198da88ee014 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2153.687380] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2153.695886] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2153.711931] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2153.712210] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.223s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2155.708141] env[68194]: DEBUG oslo_service.periodic_task [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2183.775908] env[68194]: WARNING oslo_vmware.rw_handles [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2183.775908] env[68194]: ERROR oslo_vmware.rw_handles [ 2183.778934] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/59815986-8a29-4fb8-848f-8dcf7f45d7bf/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2183.779562] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2183.779935] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Copying Virtual Disk [datastore1] vmware_temp/59815986-8a29-4fb8-848f-8dcf7f45d7bf/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/59815986-8a29-4fb8-848f-8dcf7f45d7bf/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2183.780376] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-1c103277-37d4-48bc-a4d8-0a7882cbc7b1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2183.790309] env[68194]: DEBUG oslo_vmware.api [None 
req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Waiting for the task: (returnval){ [ 2183.790309] env[68194]: value = "task-3466964" [ 2183.790309] env[68194]: _type = "Task" [ 2183.790309] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2183.798533] env[68194]: DEBUG oslo_vmware.api [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Task: {'id': task-3466964, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.302066] env[68194]: DEBUG oslo_vmware.exceptions [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2184.302066] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2184.302066] env[68194]: ERROR nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2184.302066] env[68194]: Faults: ['InvalidArgument'] [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Traceback (most recent call last): [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] yield resources [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] self.driver.spawn(context, instance, image_meta, [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] self._fetch_image_if_missing(context, vi) [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: 
bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] image_cache(vi, tmp_image_ds_loc) [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] vm_util.copy_virtual_disk( [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] session._wait_for_task(vmdk_copy_task) [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] return self.wait_for_task(task_ref) [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] return evt.wait() [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] result = hub.switch() [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] return self.greenlet.switch() [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] self.f(*self.args, **self.kw) [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] raise exceptions.translate_fault(task_info.error) [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Faults: ['InvalidArgument'] [ 2184.302066] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] [ 2184.303322] env[68194]: INFO nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 
tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Terminating instance [ 2184.303929] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2184.304161] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2184.304395] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-466d9f27-8805-4bf2-a821-e7a94f60d62b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.306649] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2184.306846] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2184.307560] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5c9ba85-ba07-4183-9c16-84b9b60a2fc3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.313938] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2184.314180] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-ec480e13-e0ba-407a-8149-2a206fb99ae4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.316215] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2184.316390] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2184.317324] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-e097ba3d-6165-4412-874a-17c0d94db802 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.322227] env[68194]: DEBUG oslo_vmware.api [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for the task: (returnval){ [ 2184.322227] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52205cd1-4053-6fe3-177d-bb29a37095b4" [ 2184.322227] env[68194]: _type = "Task" [ 2184.322227] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.329188] env[68194]: DEBUG oslo_vmware.api [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52205cd1-4053-6fe3-177d-bb29a37095b4, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.380216] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2184.380427] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2184.380603] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Deleting the datastore file [datastore1] bd9479c5-a9f5-47a6-b731-f0bf4633b688 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2184.380866] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-09aeb08a-d98f-4e70-9e02-459b0fbafccf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.387192] env[68194]: DEBUG oslo_vmware.api [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Waiting for the task: (returnval){ [ 2184.387192] env[68194]: value = "task-3466966" [ 2184.387192] env[68194]: _type = "Task" [ 2184.387192] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2184.394546] env[68194]: DEBUG oslo_vmware.api [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Task: {'id': task-3466966, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2184.832667] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2184.832938] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Creating directory with path [datastore1] vmware_temp/89f1e01a-7147-4320-9a07-7d10d2974e67/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2184.833200] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3d4a6054-396b-43dd-9f2b-c4eac1fbc346 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.844674] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Created directory with path [datastore1] vmware_temp/89f1e01a-7147-4320-9a07-7d10d2974e67/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2184.844866] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Fetch image to [datastore1] vmware_temp/89f1e01a-7147-4320-9a07-7d10d2974e67/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2184.845052] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/89f1e01a-7147-4320-9a07-7d10d2974e67/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2184.845791] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ccc3af42-fc32-4dfe-b3fd-3b96b9f88c8e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.852481] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f6d82493-9d3a-4c45-a258-6e6c188871f6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.861424] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cd2f749b-5550-45b9-b8ab-e482e764b677 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.894280] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-f3c689e8-6e8a-4d85-b02c-8920f6b3c658 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.901056] env[68194]: DEBUG oslo_vmware.api [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Task: {'id': task-3466966, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.073877} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2184.902485] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2184.902710] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2184.902897] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2184.903087] env[68194]: INFO nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2184.904814] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-a9dc0871-3727-4dcd-a3f9-ee1572406dfd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2184.906678] env[68194]: DEBUG nova.compute.claims [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2184.906850] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2184.907296] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2184.933076] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2184.985978] env[68194]: DEBUG oslo_vmware.rw_handles [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/89f1e01a-7147-4320-9a07-7d10d2974e67/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2185.046182] env[68194]: DEBUG oslo_vmware.rw_handles [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2185.046182] env[68194]: DEBUG oslo_vmware.rw_handles [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/89f1e01a-7147-4320-9a07-7d10d2974e67/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2185.094174] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a29a4c72-b466-4f42-affe-2f4f80098c19 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.101438] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-af11e13f-dce2-4ab8-9ce2-ce95d58a55e1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.130427] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c4e95f7e-3fd2-46e8-acae-9260579c0ba8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.136871] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-672d050d-c749-4781-b2a4-87626724f54b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.149504] env[68194]: DEBUG nova.compute.provider_tree [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2185.157383] env[68194]: DEBUG nova.scheduler.client.report [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2185.170657] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.264s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2185.171193] env[68194]: ERROR nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2185.171193] env[68194]: Faults: ['InvalidArgument'] [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Traceback (most recent call last): [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2185.171193] 
env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] self.driver.spawn(context, instance, image_meta, [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] self._fetch_image_if_missing(context, vi) [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] image_cache(vi, tmp_image_ds_loc) [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] vm_util.copy_virtual_disk( [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] session._wait_for_task(vmdk_copy_task) [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] return self.wait_for_task(task_ref) [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] return evt.wait() [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] result = hub.switch() [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] return self.greenlet.switch() [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] self.f(*self.args, **self.kw) [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] raise exceptions.translate_fault(task_info.error) [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Faults: ['InvalidArgument'] [ 2185.171193] env[68194]: ERROR nova.compute.manager [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] [ 2185.172185] env[68194]: DEBUG nova.compute.utils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2185.173537] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Build of instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 was re-scheduled: A specified parameter was not correct: fileType [ 2185.173537] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2185.173939] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2185.174136] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2185.174312] env[68194]: DEBUG nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2185.174478] env[68194]: DEBUG nova.network.neutron [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2185.520598] env[68194]: DEBUG nova.network.neutron [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.531828] env[68194]: INFO nova.compute.manager [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Took 0.36 seconds to deallocate network for instance. [ 2185.628350] env[68194]: INFO nova.scheduler.client.report [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Deleted allocations for instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 [ 2185.649752] env[68194]: DEBUG oslo_concurrency.lockutils [None req-58fde03c-5b21-4b5e-9a44-4a7c9dabd273 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 518.330s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2185.650008] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 354.177s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2185.650209] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2185.650501] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2185.650623] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 322.553s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2185.650839] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2185.651057] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2185.651232] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2185.653197] env[68194]: INFO nova.compute.manager [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Terminating instance [ 2185.654971] env[68194]: DEBUG nova.compute.manager [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2185.655199] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2185.655667] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-74a8680e-a555-40ab-8bbb-fef00ccde34a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.665574] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7aae2b14-0697-4ca5-8c46-ff1a86c4f1a1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2185.691404] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance bd9479c5-a9f5-47a6-b731-f0bf4633b688 could not be found. [ 2185.691614] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2185.691793] env[68194]: INFO nova.compute.manager [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2185.692055] env[68194]: DEBUG oslo.service.loopingcall [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2185.692622] env[68194]: DEBUG nova.compute.manager [-] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2185.692732] env[68194]: DEBUG nova.network.neutron [-] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2185.714629] env[68194]: DEBUG nova.network.neutron [-] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2185.722468] env[68194]: INFO nova.compute.manager [-] [instance: bd9479c5-a9f5-47a6-b731-f0bf4633b688] Took 0.03 seconds to deallocate network for instance. 
[ 2185.802889] env[68194]: DEBUG oslo_concurrency.lockutils [None req-9ed51ad9-f156-4bae-8426-12d2a694b6e2 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Lock "bd9479c5-a9f5-47a6-b731-f0bf4633b688" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.152s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2205.708544] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0fa5273f-93b8-47c2-8f1e-c5a0c63c9e70 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "073be13d-9a6d-4cfc-997b-f6b61710790a" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2210.415930] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.416319] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2210.416358] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... {{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2212.415821] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2212.416166] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2212.416166] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2212.432193] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2212.432406] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2212.432484] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2212.432620] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2212.432764] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2212.432888] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2212.433080] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2212.433596] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2212.433796] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2213.417051] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2213.417051] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2213.417051] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2213.427883] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2213.428129] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2213.428299] env[68194]: DEBUG oslo_concurrency.lockutils [None 
req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2213.428455] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2213.429573] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c6c53af1-8878-46cb-93c1-a4416d0e2ef8 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.438271] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-aa156b58-3c45-415b-8605-e0eef157240d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.451890] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-723d6f64-6b07-4aac-a927-057fd221b8e6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.458012] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f062202c-aaf3-4e28-b6dd-78a9e2da662b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.485612] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180954MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2213.485763] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2213.485948] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2213.538819] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2213.538976] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2213.539122] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2213.539247] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2213.539367] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 073be13d-9a6d-4cfc-997b-f6b61710790a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2213.539485] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance c2c4ea7a-0be6-48ec-af75-712929e48a5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2213.539663] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 6 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2213.539801] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1280MB phys_disk=200GB used_disk=6GB total_vcpus=48 used_vcpus=6 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2213.618195] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-16e1b1fe-4868-4216-8040-08369d6cfff3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.625749] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a1a4497d-0213-4859-b829-b912e7f936d2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.656590] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-ecab9024-e704-4975-9ad6-8c4892634fdf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.663384] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2e758dff-f571-40f5-b945-9ee13ea0373d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2213.675928] env[68194]: DEBUG nova.compute.provider_tree 
[None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2213.683813] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2213.696112] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2213.696292] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.210s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2216.692531] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2231.610162] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Acquiring lock "8c0511d2-b29c-469b-ac57-9bc2b9b6ff47" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2231.610493] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Lock "8c0511d2-b29c-469b-ac57-9bc2b9b6ff47" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2231.622552] env[68194]: DEBUG nova.compute.manager [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2231.674431] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2231.674823] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2231.676823] env[68194]: INFO nova.compute.claims [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2231.814369] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a4180274-59ee-4a14-aaa6-e8488a89c7ca {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.822086] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-25392195-e61e-403a-b9f3-7ce80aa2901c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.853689] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f756029a-a618-4a84-867f-0c06e424357c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.861109] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cee07d91-68f8-4256-be64-b3f631f31789 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2231.874505] env[68194]: DEBUG nova.compute.provider_tree [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2231.883899] env[68194]: DEBUG nova.scheduler.client.report [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider 
/opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2231.903037] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.228s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2231.903548] env[68194]: DEBUG nova.compute.manager [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2231.941355] env[68194]: DEBUG nova.compute.utils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2231.942842] env[68194]: DEBUG nova.compute.manager [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2231.943112] env[68194]: DEBUG nova.network.neutron [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2231.957715] env[68194]: DEBUG nova.compute.manager [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2232.006213] env[68194]: DEBUG nova.policy [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'c71dcaac06ba43a8be3ff263f0ec4736', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '9de522feaffb49bd953214a170075723', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 2232.027168] env[68194]: DEBUG nova.compute.manager [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2232.056292] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2232.056541] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2232.056782] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2232.056977] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2232.057146] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2232.057293] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2232.057500] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2232.057659] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Build topologies for 1 vcpu(s) 1:1:1 
{{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2232.057825] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2232.057986] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2232.058173] env[68194]: DEBUG nova.virt.hardware [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2232.059123] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-dc5007c8-fbee-4733-9333-3ffd6e357542 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.068250] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0589f469-2b86-40f5-bec7-e42270f7ec29 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2232.285706] env[68194]: DEBUG nova.network.neutron [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Successfully created port: 32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2232.872854] env[68194]: DEBUG nova.compute.manager [req-fae4fa46-ee6c-4b1a-bb1a-771c1f207cc8 req-a5919a3c-3771-4974-bf9f-3ea6135971a5 service nova] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Received event network-vif-plugged-32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2232.873432] env[68194]: DEBUG oslo_concurrency.lockutils [req-fae4fa46-ee6c-4b1a-bb1a-771c1f207cc8 req-a5919a3c-3771-4974-bf9f-3ea6135971a5 service nova] Acquiring lock "8c0511d2-b29c-469b-ac57-9bc2b9b6ff47-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2232.873432] env[68194]: DEBUG oslo_concurrency.lockutils [req-fae4fa46-ee6c-4b1a-bb1a-771c1f207cc8 req-a5919a3c-3771-4974-bf9f-3ea6135971a5 service nova] Lock "8c0511d2-b29c-469b-ac57-9bc2b9b6ff47-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2232.873432] env[68194]: DEBUG oslo_concurrency.lockutils [req-fae4fa46-ee6c-4b1a-bb1a-771c1f207cc8 req-a5919a3c-3771-4974-bf9f-3ea6135971a5 service nova] Lock "8c0511d2-b29c-469b-ac57-9bc2b9b6ff47-events" "released" by 
"nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2232.873549] env[68194]: DEBUG nova.compute.manager [req-fae4fa46-ee6c-4b1a-bb1a-771c1f207cc8 req-a5919a3c-3771-4974-bf9f-3ea6135971a5 service nova] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] No waiting events found dispatching network-vif-plugged-32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2232.873737] env[68194]: WARNING nova.compute.manager [req-fae4fa46-ee6c-4b1a-bb1a-771c1f207cc8 req-a5919a3c-3771-4974-bf9f-3ea6135971a5 service nova] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Received unexpected event network-vif-plugged-32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe for instance with vm_state building and task_state spawning. [ 2232.966841] env[68194]: DEBUG nova.network.neutron [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Successfully updated port: 32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2232.977348] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Acquiring lock "refresh_cache-8c0511d2-b29c-469b-ac57-9bc2b9b6ff47" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2232.977504] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Acquired lock "refresh_cache-8c0511d2-b29c-469b-ac57-9bc2b9b6ff47" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2232.977656] env[68194]: DEBUG nova.network.neutron [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2233.015825] env[68194]: DEBUG nova.network.neutron [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2233.177139] env[68194]: DEBUG nova.network.neutron [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Updating instance_info_cache with network_info: [{"id": "32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe", "address": "fa:16:3e:f2:97:1b", "network": {"id": "de06b928-958a-4d12-9fe6-5bbc9d9fe4d9", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1309240725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de522feaffb49bd953214a170075723", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32ee6c5d-33", "ovs_interfaceid": "32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2233.190396] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Releasing lock "refresh_cache-8c0511d2-b29c-469b-ac57-9bc2b9b6ff47" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2233.190780] env[68194]: DEBUG nova.compute.manager [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Instance network_info: |[{"id": "32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe", "address": "fa:16:3e:f2:97:1b", "network": {"id": "de06b928-958a-4d12-9fe6-5bbc9d9fe4d9", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1309240725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de522feaffb49bd953214a170075723", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32ee6c5d-33", "ovs_interfaceid": "32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, 
"meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2233.191240] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:f2:97:1b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '35e463c7-7d78-4d66-8efd-6127b1f3ee17', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2233.199187] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Creating folder: Project (9de522feaffb49bd953214a170075723). Parent ref: group-v692426. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2233.199739] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-1449d8a3-679b-4dfb-9296-6f86818340cd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.211228] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Created folder: Project (9de522feaffb49bd953214a170075723) in parent group-v692426. [ 2233.211408] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Creating folder: Instances. Parent ref: group-v692541. {{(pid=68194) create_folder /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1589}} [ 2233.211625] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateFolder with opID=oslo.vmware-80a2c097-fa1a-47c1-8822-00f26c5dd81b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.222966] env[68194]: INFO nova.virt.vmwareapi.vm_util [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Created folder: Instances in parent group-v692541. [ 2233.223193] env[68194]: DEBUG oslo.service.loopingcall [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2233.223369] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2233.223554] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-b926fdef-178c-470b-9e6d-08aea5b571e0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.243415] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2233.243415] env[68194]: value = "task-3466969" [ 2233.243415] env[68194]: _type = "Task" [ 2233.243415] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.251649] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466969, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2233.752553] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466969, 'name': CreateVM_Task, 'duration_secs': 0.289233} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2233.752846] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2233.753565] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2233.753847] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2233.754219] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2233.754466] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d94f77ff-e792-4a59-bae4-3f5837ce2b3c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2233.758626] env[68194]: DEBUG oslo_vmware.api [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Waiting for the task: (returnval){ [ 2233.758626] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52e17233-b1e2-aef5-d570-72b188bc8d84" [ 2233.758626] env[68194]: _type = "Task" 
[ 2233.758626] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2233.765883] env[68194]: DEBUG oslo_vmware.api [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52e17233-b1e2-aef5-d570-72b188bc8d84, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2234.269291] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2234.269591] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2234.269756] env[68194]: DEBUG oslo_concurrency.lockutils [None req-0211fd11-4712-45c5-aa1a-e6b933d9c169 tempest-ServerAddressesNegativeTestJSON-902547876 tempest-ServerAddressesNegativeTestJSON-902547876-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2234.902417] env[68194]: DEBUG nova.compute.manager [req-bee3b03f-69b2-4fbb-8be7-fd7ad5172fd7 req-cc2d5db7-edb7-4c0c-b87d-2970d866278d service nova] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Received event network-changed-32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2234.902571] env[68194]: DEBUG nova.compute.manager [req-bee3b03f-69b2-4fbb-8be7-fd7ad5172fd7 req-cc2d5db7-edb7-4c0c-b87d-2970d866278d service nova] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Refreshing instance network info cache due to event network-changed-32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe. 
{{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2234.902834] env[68194]: DEBUG oslo_concurrency.lockutils [req-bee3b03f-69b2-4fbb-8be7-fd7ad5172fd7 req-cc2d5db7-edb7-4c0c-b87d-2970d866278d service nova] Acquiring lock "refresh_cache-8c0511d2-b29c-469b-ac57-9bc2b9b6ff47" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2234.902995] env[68194]: DEBUG oslo_concurrency.lockutils [req-bee3b03f-69b2-4fbb-8be7-fd7ad5172fd7 req-cc2d5db7-edb7-4c0c-b87d-2970d866278d service nova] Acquired lock "refresh_cache-8c0511d2-b29c-469b-ac57-9bc2b9b6ff47" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2234.903188] env[68194]: DEBUG nova.network.neutron [req-bee3b03f-69b2-4fbb-8be7-fd7ad5172fd7 req-cc2d5db7-edb7-4c0c-b87d-2970d866278d service nova] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Refreshing network info cache for port 32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2234.937615] env[68194]: WARNING oslo_vmware.rw_handles [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2234.937615] env[68194]: ERROR oslo_vmware.rw_handles [ 2234.937987] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/89f1e01a-7147-4320-9a07-7d10d2974e67/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2234.940373] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2234.940635] env[68194]: DEBUG 
nova.virt.vmwareapi.vm_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Copying Virtual Disk [datastore1] vmware_temp/89f1e01a-7147-4320-9a07-7d10d2974e67/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/89f1e01a-7147-4320-9a07-7d10d2974e67/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2234.941174] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-88bc8ce9-4384-42d3-8959-2bcb867cee88 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2234.951579] env[68194]: DEBUG oslo_vmware.api [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for the task: (returnval){ [ 2234.951579] env[68194]: value = "task-3466970" [ 2234.951579] env[68194]: _type = "Task" [ 2234.951579] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2234.958983] env[68194]: DEBUG oslo_vmware.api [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': task-3466970, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.151443] env[68194]: DEBUG nova.network.neutron [req-bee3b03f-69b2-4fbb-8be7-fd7ad5172fd7 req-cc2d5db7-edb7-4c0c-b87d-2970d866278d service nova] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Updated VIF entry in instance network info cache for port 32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2235.151833] env[68194]: DEBUG nova.network.neutron [req-bee3b03f-69b2-4fbb-8be7-fd7ad5172fd7 req-cc2d5db7-edb7-4c0c-b87d-2970d866278d service nova] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Updating instance_info_cache with network_info: [{"id": "32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe", "address": "fa:16:3e:f2:97:1b", "network": {"id": "de06b928-958a-4d12-9fe6-5bbc9d9fe4d9", "bridge": "br-int", "label": "tempest-ServerAddressesNegativeTestJSON-1309240725-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "9de522feaffb49bd953214a170075723", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "35e463c7-7d78-4d66-8efd-6127b1f3ee17", "external-id": "nsx-vlan-transportzone-175", "segmentation_id": 175, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap32ee6c5d-33", "ovs_interfaceid": "32ee6c5d-33cc-491c-a2e0-0ccc32c5d3fe", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2235.161455] env[68194]: DEBUG oslo_concurrency.lockutils [req-bee3b03f-69b2-4fbb-8be7-fd7ad5172fd7 req-cc2d5db7-edb7-4c0c-b87d-2970d866278d service nova] Releasing lock "refresh_cache-8c0511d2-b29c-469b-ac57-9bc2b9b6ff47" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2235.460978] env[68194]: DEBUG oslo_vmware.exceptions [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2235.461360] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2235.461826] env[68194]: ERROR nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2235.461826] env[68194]: Faults: ['InvalidArgument'] [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Traceback (most recent call last): [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] yield resources [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] self.driver.spawn(context, instance, image_meta, [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] self._fetch_image_if_missing(context, vi) [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] image_cache(vi, tmp_image_ds_loc) [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] vm_util.copy_virtual_disk( [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] session._wait_for_task(vmdk_copy_task) [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] return self.wait_for_task(task_ref) [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] return evt.wait() [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] result = hub.switch() [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] return self.greenlet.switch() [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] self.f(*self.args, **self.kw) [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] raise exceptions.translate_fault(task_info.error) [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Faults: ['InvalidArgument'] [ 2235.461826] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] [ 2235.462594] env[68194]: INFO nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Terminating instance [ 2235.463876] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2235.464104] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2235.464349] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-3b002283-9288-4759-b35c-ff1546200a16 
{{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.466650] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2235.466845] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2235.467559] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-009865be-89d2-42be-9eaf-3c4efdd66be2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.475609] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2235.476552] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-483e1583-c9fb-4049-a9ed-5b0b85514d16 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.477854] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2235.478050] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2235.478688] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-06561387-7c99-4bcf-9be3-d2d9359d697b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.483932] env[68194]: DEBUG oslo_vmware.api [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for the task: (returnval){ [ 2235.483932] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527ac76a-2fe3-8351-825f-32a828aced10" [ 2235.483932] env[68194]: _type = "Task" [ 2235.483932] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.490521] env[68194]: DEBUG oslo_vmware.api [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527ac76a-2fe3-8351-825f-32a828aced10, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.545823] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2235.546064] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2235.546235] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Deleting the datastore file [datastore1] 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2235.546506] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-fa3c4ce3-65cc-4506-a594-606aba1d94dd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2235.552711] env[68194]: DEBUG oslo_vmware.api [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for the task: (returnval){ [ 2235.552711] env[68194]: value = "task-3466972" [ 2235.552711] env[68194]: _type = "Task" [ 2235.552711] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2235.560170] env[68194]: DEBUG oslo_vmware.api [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': task-3466972, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2235.993844] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2235.994131] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Creating directory with path [datastore1] vmware_temp/76219d70-763f-4e63-9c0b-39893a171067/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2235.994373] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-4d0f7a43-f63f-458f-8603-f61d73330a89 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.005327] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Created directory with path [datastore1] vmware_temp/76219d70-763f-4e63-9c0b-39893a171067/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2236.005517] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Fetch image to [datastore1] vmware_temp/76219d70-763f-4e63-9c0b-39893a171067/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2236.005686] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/76219d70-763f-4e63-9c0b-39893a171067/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2236.006416] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-14a2a4b1-292f-4d8c-9076-889e4d8e341d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.013149] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9b890069-826b-44c4-8633-d25e4c294cb9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.022141] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-05f35448-e19f-4420-949b-1fb146160be5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.051375] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-0346c36f-3f11-42e0-a321-c1e85d13c505 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.061354] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-d7546964-7d35-401b-bdb1-2930366ac3b4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.062940] env[68194]: DEBUG oslo_vmware.api [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': task-3466972, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.069151} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2236.063244] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2236.063449] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2236.063701] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2236.063899] env[68194]: INFO nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2236.065927] env[68194]: DEBUG nova.compute.claims [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2236.066103] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2236.066319] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2236.096605] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2236.148069] env[68194]: DEBUG oslo_vmware.rw_handles [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76219d70-763f-4e63-9c0b-39893a171067/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2236.209863] env[68194]: DEBUG oslo_vmware.rw_handles [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2236.210062] env[68194]: DEBUG oslo_vmware.rw_handles [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Closing write handle for https://esx7c1n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/76219d70-763f-4e63-9c0b-39893a171067/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2236.251782] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-47299b92-5dd4-473f-a989-23a2833559fa {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.258603] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7a8dfdf7-2b85-4a12-a5fd-fec78c3ad53a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.287731] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8e0c4215-3708-4c9c-81c3-3529499d6e8d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.294846] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-78febf61-d405-4bed-a9e4-0389e2362bfc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.308689] env[68194]: DEBUG nova.compute.provider_tree [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2236.317136] env[68194]: DEBUG nova.scheduler.client.report [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2236.333356] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.267s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2236.333929] env[68194]: ERROR nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2236.333929] env[68194]: Faults: ['InvalidArgument'] [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Traceback (most recent call last): [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2236.333929] 
env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] self.driver.spawn(context, instance, image_meta, [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] self._fetch_image_if_missing(context, vi) [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] image_cache(vi, tmp_image_ds_loc) [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] vm_util.copy_virtual_disk( [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] session._wait_for_task(vmdk_copy_task) [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] return self.wait_for_task(task_ref) [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] return evt.wait() [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] result = hub.switch() [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] return self.greenlet.switch() [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] self.f(*self.args, **self.kw) [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] raise exceptions.translate_fault(task_info.error) [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Faults: ['InvalidArgument'] [ 2236.333929] env[68194]: ERROR nova.compute.manager [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] [ 2236.334730] env[68194]: DEBUG nova.compute.utils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2236.336012] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Build of instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 was re-scheduled: A specified parameter was not correct: fileType [ 2236.336012] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2236.336384] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2236.336556] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2236.336712] env[68194]: DEBUG nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2236.336877] env[68194]: DEBUG nova.network.neutron [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2236.618475] env[68194]: DEBUG nova.network.neutron [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.630019] env[68194]: INFO nova.compute.manager [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Took 0.29 seconds to deallocate network for instance. [ 2236.712509] env[68194]: INFO nova.scheduler.client.report [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Deleted allocations for instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 [ 2236.735084] env[68194]: DEBUG oslo_concurrency.lockutils [None req-47a67d38-d496-41d7-a719-cbf9ffb24d37 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 528.191s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2236.735465] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" acquired by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: waited 405.263s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2236.735721] env[68194]: INFO nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] During sync_power_state the instance has a pending task (spawning). Skip. 
[ 2236.735930] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" "released" by "nova.compute.manager.ComputeManager._sync_power_states.._sync..query_driver_power_state_and_sync" :: held 0.001s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2236.736795] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 135.536s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2236.736795] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2236.736952] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2236.738027] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2236.739026] env[68194]: INFO nova.compute.manager [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Terminating instance [ 2236.740801] env[68194]: DEBUG nova.compute.manager [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2236.741107] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2236.741466] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-82daab2c-8822-47bf-9b78-f7e6e4892a23 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.750920] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-046c23a0-dae9-496f-a146-2aab154985b7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2236.783308] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15 could not be found. [ 2236.783530] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2236.784185] env[68194]: INFO nova.compute.manager [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2236.784185] env[68194]: DEBUG oslo.service.loopingcall [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2236.784185] env[68194]: DEBUG nova.compute.manager [-] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2236.784325] env[68194]: DEBUG nova.network.neutron [-] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2236.812076] env[68194]: DEBUG nova.network.neutron [-] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2236.820059] env[68194]: INFO nova.compute.manager [-] [instance: 1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15] Took 0.04 seconds to deallocate network for instance. 
[ 2236.907598] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b6a71cf1-49c7-4b8f-9d23-b32b7fbb4627 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "1d01a2fd-b0e6-436a-b9d4-0ca9a9e2ec15" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.171s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2243.425300] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "727af4d2-ec4c-4ea3-baae-a32a70d03e0e" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2243.425606] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "727af4d2-ec4c-4ea3-baae-a32a70d03e0e" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2243.436412] env[68194]: DEBUG nova.compute.manager [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Starting instance... {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2243.485907] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2243.486167] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2243.487536] env[68194]: INFO nova.compute.claims [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2243.608830] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-36940b95-52ae-4b2d-99fc-9c3c33f9a0ae {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.616312] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a9b418fa-3d5b-470b-9aa9-796df100253b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.646273] env[68194]: DEBUG oslo_vmware.service [-] Invoking 
PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fda8a82a-1ebb-46ea-aeda-da89ff00bc1f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.652823] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-bdff5140-d0ee-4f02-9027-bb9ea1c3d50d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.665659] env[68194]: DEBUG nova.compute.provider_tree [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2243.674683] env[68194]: DEBUG nova.scheduler.client.report [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2243.687132] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.201s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2243.687550] env[68194]: DEBUG nova.compute.manager [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2243.717747] env[68194]: DEBUG nova.compute.utils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2243.718876] env[68194]: DEBUG nova.compute.manager [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Allocating IP information in the background. 
{{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2243.719063] env[68194]: DEBUG nova.network.neutron [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2243.727700] env[68194]: DEBUG nova.compute.manager [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2243.778814] env[68194]: DEBUG nova.policy [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': '905b97edce374ad5a240d61220f66f80', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': '05181674899f44e7bb6d234643c3e6b6', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 2243.790877] env[68194]: DEBUG nova.compute.manager [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2243.812235] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2243.812480] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2243.812907] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2243.812907] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2243.813052] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2243.813209] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2243.813420] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2243.813618] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2243.814096] env[68194]: DEBUG nova.virt.hardware [None 
req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2243.814096] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2243.814226] env[68194]: DEBUG nova.virt.hardware [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2243.815189] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-6bd59a0e-d4eb-494e-b06d-093b4d0b8346 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2243.822790] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b963fb0b-4ff1-402b-a477-e4bdf2f1e9d2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2244.078557] env[68194]: DEBUG nova.network.neutron [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Successfully created port: 1709585c-3f57-41c2-9130-2ac4a7b91285 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2244.620188] env[68194]: DEBUG nova.compute.manager [req-f4245c46-c612-402d-8f4a-b3fa46f3fe36 req-6b79ddcf-a613-4098-995c-59cca198750d service nova] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Received event network-vif-plugged-1709585c-3f57-41c2-9130-2ac4a7b91285 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2244.620434] env[68194]: DEBUG oslo_concurrency.lockutils [req-f4245c46-c612-402d-8f4a-b3fa46f3fe36 req-6b79ddcf-a613-4098-995c-59cca198750d service nova] Acquiring lock "727af4d2-ec4c-4ea3-baae-a32a70d03e0e-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2244.620576] env[68194]: DEBUG oslo_concurrency.lockutils [req-f4245c46-c612-402d-8f4a-b3fa46f3fe36 req-6b79ddcf-a613-4098-995c-59cca198750d service nova] Lock "727af4d2-ec4c-4ea3-baae-a32a70d03e0e-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2244.620731] env[68194]: DEBUG oslo_concurrency.lockutils [req-f4245c46-c612-402d-8f4a-b3fa46f3fe36 req-6b79ddcf-a613-4098-995c-59cca198750d service nova] Lock "727af4d2-ec4c-4ea3-baae-a32a70d03e0e-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2244.620892] env[68194]: DEBUG nova.compute.manager 
[req-f4245c46-c612-402d-8f4a-b3fa46f3fe36 req-6b79ddcf-a613-4098-995c-59cca198750d service nova] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] No waiting events found dispatching network-vif-plugged-1709585c-3f57-41c2-9130-2ac4a7b91285 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2244.621212] env[68194]: WARNING nova.compute.manager [req-f4245c46-c612-402d-8f4a-b3fa46f3fe36 req-6b79ddcf-a613-4098-995c-59cca198750d service nova] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Received unexpected event network-vif-plugged-1709585c-3f57-41c2-9130-2ac4a7b91285 for instance with vm_state building and task_state spawning. [ 2244.630848] env[68194]: DEBUG nova.network.neutron [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Successfully updated port: 1709585c-3f57-41c2-9130-2ac4a7b91285 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2244.642051] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "refresh_cache-727af4d2-ec4c-4ea3-baae-a32a70d03e0e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2244.642051] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "refresh_cache-727af4d2-ec4c-4ea3-baae-a32a70d03e0e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2244.642051] env[68194]: DEBUG nova.network.neutron [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2244.678011] env[68194]: DEBUG nova.network.neutron [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2244.983776] env[68194]: DEBUG nova.network.neutron [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Updating instance_info_cache with network_info: [{"id": "1709585c-3f57-41c2-9130-2ac4a7b91285", "address": "fa:16:3e:07:6e:23", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1709585c-3f", "ovs_interfaceid": "1709585c-3f57-41c2-9130-2ac4a7b91285", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2244.997094] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "refresh_cache-727af4d2-ec4c-4ea3-baae-a32a70d03e0e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2244.997385] env[68194]: DEBUG nova.compute.manager [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Instance network_info: |[{"id": "1709585c-3f57-41c2-9130-2ac4a7b91285", "address": "fa:16:3e:07:6e:23", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1709585c-3f", "ovs_interfaceid": "1709585c-3f57-41c2-9130-2ac4a7b91285", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async 
/opt/stack/nova/nova/compute/manager.py:1971}} [ 2244.997764] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:07:6e:23', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': 'c3291573-fad8-48cc-a965-c3554e7cee4e', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '1709585c-3f57-41c2-9130-2ac4a7b91285', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2245.006151] env[68194]: DEBUG oslo.service.loopingcall [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2245.006610] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2245.008117] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-649336d0-94a3-4e18-b39c-b784204a4a3f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.028453] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2245.028453] env[68194]: value = "task-3466973" [ 2245.028453] env[68194]: _type = "Task" [ 2245.028453] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.035926] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466973, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2245.538626] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466973, 'name': CreateVM_Task, 'duration_secs': 0.311813} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2245.538896] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2245.539467] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2245.539643] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2245.539951] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2245.540213] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-0adadc13-45e5-4558-b124-4043546fb423 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2245.544480] env[68194]: DEBUG oslo_vmware.api [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 2245.544480] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52bd28cb-0b06-ddfc-0c9f-8133654a1ec0" [ 2245.544480] env[68194]: _type = "Task" [ 2245.544480] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2245.551584] env[68194]: DEBUG oslo_vmware.api [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52bd28cb-0b06-ddfc-0c9f-8133654a1ec0, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2246.055508] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2246.055771] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2246.056128] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ef1767a5-bd3e-4be0-9caf-6da798789762 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2246.643988] env[68194]: DEBUG nova.compute.manager [req-78e67d4f-83c2-44da-90d8-4684b349f8c8 req-9af96d82-4db3-48d1-bbcc-43559bcc33f2 service nova] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Received event network-changed-1709585c-3f57-41c2-9130-2ac4a7b91285 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2246.643988] env[68194]: DEBUG nova.compute.manager [req-78e67d4f-83c2-44da-90d8-4684b349f8c8 req-9af96d82-4db3-48d1-bbcc-43559bcc33f2 service nova] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Refreshing instance network info cache due to event network-changed-1709585c-3f57-41c2-9130-2ac4a7b91285. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2246.644174] env[68194]: DEBUG oslo_concurrency.lockutils [req-78e67d4f-83c2-44da-90d8-4684b349f8c8 req-9af96d82-4db3-48d1-bbcc-43559bcc33f2 service nova] Acquiring lock "refresh_cache-727af4d2-ec4c-4ea3-baae-a32a70d03e0e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2246.644328] env[68194]: DEBUG oslo_concurrency.lockutils [req-78e67d4f-83c2-44da-90d8-4684b349f8c8 req-9af96d82-4db3-48d1-bbcc-43559bcc33f2 service nova] Acquired lock "refresh_cache-727af4d2-ec4c-4ea3-baae-a32a70d03e0e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2246.644494] env[68194]: DEBUG nova.network.neutron [req-78e67d4f-83c2-44da-90d8-4684b349f8c8 req-9af96d82-4db3-48d1-bbcc-43559bcc33f2 service nova] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Refreshing network info cache for port 1709585c-3f57-41c2-9130-2ac4a7b91285 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2246.869790] env[68194]: DEBUG nova.network.neutron [req-78e67d4f-83c2-44da-90d8-4684b349f8c8 req-9af96d82-4db3-48d1-bbcc-43559bcc33f2 service nova] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Updated VIF entry in instance network info cache for port 1709585c-3f57-41c2-9130-2ac4a7b91285. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2246.870167] env[68194]: DEBUG nova.network.neutron [req-78e67d4f-83c2-44da-90d8-4684b349f8c8 req-9af96d82-4db3-48d1-bbcc-43559bcc33f2 service nova] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Updating instance_info_cache with network_info: [{"id": "1709585c-3f57-41c2-9130-2ac4a7b91285", "address": "fa:16:3e:07:6e:23", "network": {"id": "327b172e-477f-4070-9bcb-c1216237bde9", "bridge": "br-int", "label": "tempest-DeleteServersTestJSON-980534088-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.13", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "05181674899f44e7bb6d234643c3e6b6", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "c3291573-fad8-48cc-a965-c3554e7cee4e", "external-id": "nsx-vlan-transportzone-115", "segmentation_id": 115, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap1709585c-3f", "ovs_interfaceid": "1709585c-3f57-41c2-9130-2ac4a7b91285", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2246.879181] env[68194]: DEBUG oslo_concurrency.lockutils [req-78e67d4f-83c2-44da-90d8-4684b349f8c8 req-9af96d82-4db3-48d1-bbcc-43559bcc33f2 service nova] Releasing lock "refresh_cache-727af4d2-ec4c-4ea3-baae-a32a70d03e0e" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2258.274340] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e14f0828-e889-4a13-9006-e1f9fc4ea0c6 tempest-AttachVolumeNegativeTest-216101607 tempest-AttachVolumeNegativeTest-216101607-project-member] Acquiring lock "c2c4ea7a-0be6-48ec-af75-712929e48a5b" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2271.417037] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2271.417436] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2272.418401] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2273.415943] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2273.416275] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2274.416708] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2274.418025] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2274.418025] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2274.437309] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2274.437465] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2274.437598] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2274.437727] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2274.437853] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Skipping network cache update for instance because it is Building. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2274.437980] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2274.438261] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2274.438403] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2274.438868] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2274.439072] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.416297] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.434273] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2275.467250] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2275.467478] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2275.467649] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2275.467804] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2275.468908] 
env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-153e2c3d-5148-4272-b7f2-d2fcaa22360b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.476938] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060a3b2d-6b50-409b-92e3-05c8840d63ba {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.490622] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-63d5d9fe-135b-43c1-8210-7b61cd6ef13c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.496622] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-93681c97-aa69-4d75-a530-7e9008e763bc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.524605] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180972MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2275.524745] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2275.524933] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2275.583257] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2275.583413] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2275.583542] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2275.583665] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 073be13d-9a6d-4cfc-997b-f6b61710790a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2275.583809] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance c2c4ea7a-0be6-48ec-af75-712929e48a5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2275.583965] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2275.584106] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 727af4d2-ec4c-4ea3-baae-a32a70d03e0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2275.584292] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 7 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2275.584429] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1408MB phys_disk=200GB used_disk=7GB total_vcpus=48 used_vcpus=7 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2275.666860] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-7c2f44b6-8875-4fd1-a2f1-dd32bdfc6681 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.674417] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d4f12479-d0f9-4e52-8628-cb7c526008e7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.704232] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-54d0fbf5-2b59-47a9-ba4d-c2b5cc16fbb3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.710825] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5f323dee-5fca-4a8c-9b95-d2914aa10859 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2275.723335] env[68194]: DEBUG nova.compute.provider_tree 
[None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2275.731212] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2275.744575] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2275.744754] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.220s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2276.740122] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2282.979462] env[68194]: WARNING oslo_vmware.rw_handles [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2282.979462] env[68194]: ERROR oslo_vmware.rw_handles [ 2282.980134] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 
tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/76219d70-763f-4e63-9c0b-39893a171067/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2282.982065] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2282.982311] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Copying Virtual Disk [datastore1] vmware_temp/76219d70-763f-4e63-9c0b-39893a171067/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/76219d70-763f-4e63-9c0b-39893a171067/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2282.982601] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-fc7de249-d449-4468-b16d-8bf2a878fb66 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2282.990898] env[68194]: DEBUG oslo_vmware.api [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for the task: (returnval){ [ 2282.990898] env[68194]: value = "task-3466974" [ 2282.990898] env[68194]: _type = "Task" [ 2282.990898] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2282.998544] env[68194]: DEBUG oslo_vmware.api [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': task-3466974, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.501559] env[68194]: DEBUG oslo_vmware.exceptions [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Fault InvalidArgument not matched. 
{{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2283.501782] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2283.502358] env[68194]: ERROR nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2283.502358] env[68194]: Faults: ['InvalidArgument'] [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Traceback (most recent call last): [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] yield resources [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] self.driver.spawn(context, instance, image_meta, [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] self._fetch_image_if_missing(context, vi) [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] image_cache(vi, tmp_image_ds_loc) [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] vm_util.copy_virtual_disk( [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] session._wait_for_task(vmdk_copy_task) [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File 
"/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] return self.wait_for_task(task_ref) [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] return evt.wait() [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] result = hub.switch() [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] return self.greenlet.switch() [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] self.f(*self.args, **self.kw) [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] raise exceptions.translate_fault(task_info.error) [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Faults: ['InvalidArgument'] [ 2283.502358] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] [ 2283.503379] env[68194]: INFO nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Terminating instance [ 2283.504263] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2283.504469] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2283.504698] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with 
opID=oslo.vmware-c442390e-0997-4f8e-b0f3-7a05e18174a3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.506789] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2283.506991] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2283.507687] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3f7d73a-1bed-4057-8739-0766dc07d84a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.514428] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2283.514623] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-f91ac5ab-3fb1-4b3d-8775-40d1bc7bf8d7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.516635] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2283.516806] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2283.517701] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-a347ea05-31d9-470d-8d3d-37b1a712a65f {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.522068] env[68194]: DEBUG oslo_vmware.api [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 2283.522068] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527bcd75-61bc-8a14-074c-37e113f53bed" [ 2283.522068] env[68194]: _type = "Task" [ 2283.522068] env[68194]: } to complete. 
{{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.528831] env[68194]: DEBUG oslo_vmware.api [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]527bcd75-61bc-8a14-074c-37e113f53bed, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2283.585226] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2283.585443] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2283.585623] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Deleting the datastore file [datastore1] 7142c793-cb3a-4bb0-87b6-c7fd5547f252 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2283.585877] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-c6ddbf6e-cc54-41e8-963d-2eb895969f20 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2283.592679] env[68194]: DEBUG oslo_vmware.api [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for the task: (returnval){ [ 2283.592679] env[68194]: value = "task-3466976" [ 2283.592679] env[68194]: _type = "Task" [ 2283.592679] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2283.599824] env[68194]: DEBUG oslo_vmware.api [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': task-3466976, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2284.033028] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2284.033381] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating directory with path [datastore1] vmware_temp/32a53fe1-8a29-425f-82dd-99a4f896d38f/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2284.033481] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-59658aec-a321-4f34-ad87-413ab9458da4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.044496] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Created directory with path [datastore1] vmware_temp/32a53fe1-8a29-425f-82dd-99a4f896d38f/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2284.044680] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Fetch image to [datastore1] vmware_temp/32a53fe1-8a29-425f-82dd-99a4f896d38f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2284.044850] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/32a53fe1-8a29-425f-82dd-99a4f896d38f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2284.045573] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f0af89b4-1e76-4c8c-bef0-ebabdb626a14 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.051793] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-75dbb55c-20b7-430b-b266-a84a2a43f4c2 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.060295] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-734d3048-9590-441b-aa5b-3b388b459859 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.089973] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-4aa92981-78bf-48b2-9500-d2f3267b7fdd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.097819] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-e48d8182-d3c7-43dc-ac72-beab33bcd11b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.101888] env[68194]: DEBUG oslo_vmware.api [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Task: {'id': task-3466976, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.063719} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2284.102421] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2284.102611] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2284.102830] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2284.103015] env[68194]: INFO nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Took 0.60 seconds to destroy the instance on the hypervisor. 
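The records above repeatedly show the same pattern: a vSphere task is invoked (CreateVM_Task, CopyVirtualDisk_Task, DeleteDatastoreFile_Task), then wait_for_task/_poll_task reports "progress is 0%" until the task either completes successfully or raises a translated fault. The following is a minimal, purely illustrative sketch of that poll-until-done loop under assumed names; get_task_info, TaskFailed, and poll_interval are hypothetical and this is not the oslo_vmware implementation.

    import time

    class TaskFailed(Exception):
        """Raised when the remote task reports an error (cf. the VimFaultException above)."""

    def wait_for_task(get_task_info, poll_interval=0.5):
        """Poll get_task_info() until the task succeeds or fails.

        get_task_info is assumed to return a dict such as
        {'state': 'running' | 'success' | 'error', 'progress': 0, 'error': None}.
        """
        while True:
            info = get_task_info()
            if info['state'] == 'success':
                return info
            if info['state'] == 'error':
                # the log's _poll_task raises a translated fault at this point
                raise TaskFailed(info.get('error') or 'task failed')
            time.sleep(poll_interval)

For example, wait_for_task(lambda: {'state': 'success', 'progress': 100, 'error': None}) returns immediately, while a callable that eventually reports 'error' raises TaskFailed, matching the success and fault paths seen in the task records above.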
[ 2284.105036] env[68194]: DEBUG nova.compute.claims [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2284.105218] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2284.105430] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2284.121378] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2284.173238] env[68194]: DEBUG oslo_vmware.rw_handles [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32a53fe1-8a29-425f-82dd-99a4f896d38f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2284.232597] env[68194]: DEBUG oslo_vmware.rw_handles [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2284.232783] env[68194]: DEBUG oslo_vmware.rw_handles [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Closing write handle for https://esx7c1n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/32a53fe1-8a29-425f-82dd-99a4f896d38f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2284.290832] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fb4f3781-ab9c-4309-908b-319ec99ecbb5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.297854] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3dd8ff3d-70da-4ad9-9f71-2776e4e3fc76 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.328636] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85edec81-3e88-4aa0-a80a-b57408ad8da5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.335729] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-239fd078-2821-4013-868d-acdeb45159c5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.348633] env[68194]: DEBUG nova.compute.provider_tree [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2284.357173] env[68194]: DEBUG nova.scheduler.client.report [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2284.372252] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.267s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2284.372779] env[68194]: ERROR nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2284.372779] env[68194]: Faults: ['InvalidArgument'] [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Traceback (most recent call last): [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2284.372779] 
env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] self.driver.spawn(context, instance, image_meta, [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] self._fetch_image_if_missing(context, vi) [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] image_cache(vi, tmp_image_ds_loc) [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] vm_util.copy_virtual_disk( [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] session._wait_for_task(vmdk_copy_task) [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] return self.wait_for_task(task_ref) [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] return evt.wait() [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] result = hub.switch() [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] return self.greenlet.switch() [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] self.f(*self.args, **self.kw) [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] raise exceptions.translate_fault(task_info.error) [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Faults: ['InvalidArgument'] [ 2284.372779] env[68194]: ERROR nova.compute.manager [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] [ 2284.373878] env[68194]: DEBUG nova.compute.utils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2284.375080] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Build of instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 was re-scheduled: A specified parameter was not correct: fileType [ 2284.375080] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2284.375460] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2284.375635] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2284.375794] env[68194]: DEBUG nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2284.375957] env[68194]: DEBUG nova.network.neutron [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2284.672694] env[68194]: DEBUG nova.network.neutron [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2284.684818] env[68194]: INFO nova.compute.manager [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Took 0.31 seconds to deallocate network for instance. [ 2284.774262] env[68194]: INFO nova.scheduler.client.report [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Deleted allocations for instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 [ 2284.799197] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2b1800aa-613d-4d17-8709-5f4af727f936 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 576.081s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2284.799484] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 379.752s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2284.799716] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Acquiring lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2284.799924] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252-events" acquired by 
"nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2284.800129] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2284.802211] env[68194]: INFO nova.compute.manager [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Terminating instance [ 2284.804368] env[68194]: DEBUG nova.compute.manager [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2284.804569] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2284.805199] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-6c9f363c-818d-4954-9ec9-398c6eee1f8e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.814898] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cf8c64f1-b3f0-4f72-9438-8aeff0aaf50a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2284.840731] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 7142c793-cb3a-4bb0-87b6-c7fd5547f252 could not be found. [ 2284.840934] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2284.841125] env[68194]: INFO nova.compute.manager [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Took 0.04 seconds to destroy the instance on the hypervisor. 
[ 2284.841368] env[68194]: DEBUG oslo.service.loopingcall [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2284.841827] env[68194]: DEBUG nova.compute.manager [-] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2284.841932] env[68194]: DEBUG nova.network.neutron [-] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2284.866736] env[68194]: DEBUG nova.network.neutron [-] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2284.874321] env[68194]: INFO nova.compute.manager [-] [instance: 7142c793-cb3a-4bb0-87b6-c7fd5547f252] Took 0.03 seconds to deallocate network for instance. [ 2284.964162] env[68194]: DEBUG oslo_concurrency.lockutils [None req-e3c0c93c-12d4-4ff9-9dee-0819e6f2ffa0 tempest-ListImageFiltersTestJSON-871612778 tempest-ListImageFiltersTestJSON-871612778-project-member] Lock "7142c793-cb3a-4bb0-87b6-c7fd5547f252" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.165s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2333.416452] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.416841] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.416841] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2333.416948] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2333.805644] env[68194]: WARNING oslo_vmware.rw_handles [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2333.805644] env[68194]: ERROR oslo_vmware.rw_handles [ 2333.806051] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/32a53fe1-8a29-425f-82dd-99a4f896d38f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2333.808334] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2333.808582] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Copying Virtual Disk [datastore1] vmware_temp/32a53fe1-8a29-425f-82dd-99a4f896d38f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] vmware_temp/32a53fe1-8a29-425f-82dd-99a4f896d38f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2333.808873] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-0868b964-3bca-4f1d-a57a-8430c6adea90 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2333.820165] env[68194]: DEBUG oslo_vmware.api [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 
tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 2333.820165] env[68194]: value = "task-3466977" [ 2333.820165] env[68194]: _type = "Task" [ 2333.820165] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2333.828070] env[68194]: DEBUG oslo_vmware.api [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': task-3466977, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.330815] env[68194]: DEBUG oslo_vmware.exceptions [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2334.331373] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2334.332069] env[68194]: ERROR nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2334.332069] env[68194]: Faults: ['InvalidArgument'] [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Traceback (most recent call last): [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] yield resources [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] self.driver.spawn(context, instance, image_meta, [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] self._fetch_image_if_missing(context, vi) [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in 
_fetch_image_if_missing [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] image_cache(vi, tmp_image_ds_loc) [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] vm_util.copy_virtual_disk( [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] session._wait_for_task(vmdk_copy_task) [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] return self.wait_for_task(task_ref) [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] return evt.wait() [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] result = hub.switch() [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] return self.greenlet.switch() [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] self.f(*self.args, **self.kw) [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] raise exceptions.translate_fault(task_info.error) [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Faults: ['InvalidArgument'] [ 2334.332069] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] [ 2334.333950] env[68194]: INFO nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Terminating instance [ 2334.334011] env[68194]: 
DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2334.334204] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2334.334451] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-d5e6067c-458a-40e6-b857-ff487a2dced9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.336605] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2334.336780] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2334.337495] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-db612061-8ce7-477a-8a26-aca4d1e41c58 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.344672] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2334.344877] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-a91ec384-99a4-4ca9-adb2-a2c1d2842acf {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.346942] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2334.347128] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Folder [datastore1] devstack-image-cache_base created. 
{{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2334.348032] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-5a149e40-69a3-4b9b-8560-f1e1e51ed73e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.352503] env[68194]: DEBUG oslo_vmware.api [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 2334.352503] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52dbe838-95b3-5dd9-1b53-fe862fe2a007" [ 2334.352503] env[68194]: _type = "Task" [ 2334.352503] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.359359] env[68194]: DEBUG oslo_vmware.api [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]52dbe838-95b3-5dd9-1b53-fe862fe2a007, 'name': SearchDatastore_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.411829] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2334.412084] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2334.412275] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Deleting the datastore file [datastore1] 8f84a8dc-6908-463c-85e3-f5189e8ca71d {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2334.412541] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-7b62ccd0-4508-408a-9a89-4b4c7a6abac5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.416260] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2334.416514] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2334.416752] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2334.418712] env[68194]: DEBUG oslo_vmware.api [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for the task: (returnval){ [ 2334.418712] env[68194]: value = "task-3466979" [ 2334.418712] env[68194]: _type = "Task" [ 2334.418712] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2334.426355] env[68194]: DEBUG oslo_vmware.api [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': task-3466979, 'name': DeleteDatastoreFile_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2334.862935] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2334.863279] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating directory with path [datastore1] vmware_temp/275e1ba8-d4db-4ea7-852a-c29a709e06b9/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2334.863523] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-dfcee018-ebd1-43f3-8ba2-bd32e57d4866 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.874138] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Created directory with path [datastore1] vmware_temp/275e1ba8-d4db-4ea7-852a-c29a709e06b9/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2334.874326] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Fetch image to [datastore1] vmware_temp/275e1ba8-d4db-4ea7-852a-c29a709e06b9/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2334.874498] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/275e1ba8-d4db-4ea7-852a-c29a709e06b9/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2334.875222] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-b199193a-318b-4093-919e-0840ac1e939d {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.881357] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5046ba3d-167d-494f-a37a-c0ab88029149 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.890045] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-61f90e63-0b09-41f9-ab9b-d8ee5ef27c33 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.923206] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-e5ca63af-7588-4729-9910-df784f1801bc {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.929810] env[68194]: DEBUG oslo_vmware.api [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Task: {'id': task-3466979, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.076072} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2334.931227] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2334.931417] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2334.931595] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2334.931771] env[68194]: INFO nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Took 0.60 seconds to destroy the instance on the hypervisor. 
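[editor's note] The "Preparing fetch location" / "Fetch image to ... tmp-sparse.vmdk" records above describe the Glance image being streamed onto the datastore over HTTP before it is cached. A minimal sketch of that upload step, assuming oslo.vmware's FileWriteHandle and placeholder host, path and cookie values (the exact wiring inside nova.virt.vmwareapi.images is not reproduced here):

    from oslo_vmware import rw_handles

    def upload_to_datastore(cookies, source, size):
        # Streams image bytes to the datastore "/folder" endpoint, i.e. a URL of
        # the form logged below:
        #   https://<esx-host>:443/folder/vmware_temp/.../tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1
        handle = rw_handles.FileWriteHandle(
            'esx-host.example.test', 443,      # placeholder host/port
            'ha-datacenter', 'datastore1',
            cookies,                           # authenticated vSphere session cookies
            'vmware_temp/<uuid>/<image-id>/tmp-sparse.vmdk',
            size)
        for chunk in iter(lambda: source.read(64 * 1024), b''):
            handle.write(chunk)
        # close() reads the HTTP response; the RemoteDisconnected warning earlier
        # in this log was raised from exactly this step.
        handle.close()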
[ 2334.933506] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-46482db2-cc18-416f-b8c0-427c345f9ded {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2334.935347] env[68194]: DEBUG nova.compute.claims [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2334.935514] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2334.935730] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2334.974757] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2335.025573] env[68194]: DEBUG oslo_vmware.rw_handles [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/275e1ba8-d4db-4ea7-852a-c29a709e06b9/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2335.085671] env[68194]: DEBUG oslo_vmware.rw_handles [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2335.085671] env[68194]: DEBUG oslo_vmware.rw_handles [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Closing write handle for https://esx7c2n3.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/275e1ba8-d4db-4ea7-852a-c29a709e06b9/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2335.116379] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-07e80ece-65b3-405b-9c21-0db808293577 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.123497] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0805ac5d-406b-4919-804b-f1a4518b73e6 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.153214] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-de5de6b7-61dd-458a-bb02-d501db913286 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.159783] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-41178352-058c-4af6-b70d-924e03fe4943 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.173620] env[68194]: DEBUG nova.compute.provider_tree [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2335.182092] env[68194]: DEBUG nova.scheduler.client.report [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2335.215310] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.279s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2335.215860] env[68194]: ERROR nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2335.215860] env[68194]: Faults: ['InvalidArgument'] [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Traceback (most recent call last): [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2335.215860] 
env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] self.driver.spawn(context, instance, image_meta, [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] self._fetch_image_if_missing(context, vi) [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] image_cache(vi, tmp_image_ds_loc) [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] vm_util.copy_virtual_disk( [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] session._wait_for_task(vmdk_copy_task) [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] return self.wait_for_task(task_ref) [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] return evt.wait() [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] result = hub.switch() [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] return self.greenlet.switch() [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] self.f(*self.args, **self.kw) [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] raise exceptions.translate_fault(task_info.error) [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Faults: ['InvalidArgument'] [ 2335.215860] env[68194]: ERROR nova.compute.manager [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] [ 2335.216817] env[68194]: DEBUG nova.compute.utils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2335.218025] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Build of instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d was re-scheduled: A specified parameter was not correct: fileType [ 2335.218025] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2335.219037] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2335.219037] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2335.219037] env[68194]: DEBUG nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2335.219180] env[68194]: DEBUG nova.network.neutron [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2335.416270] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2335.416460] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2335.416582] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2335.433789] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2335.433789] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2335.434230] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2335.434230] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2335.434230] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2335.434724] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2335.435386] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2335.445499] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2335.445851] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2335.446100] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2335.446300] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2335.447569] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-962018c6-3425-4f79-bb0e-357ce6c4cf15 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.457069] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-9fb29203-9323-4257-982c-e07dbb468ddb {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.471403] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-b2aa399f-121e-47a2-947e-9e5069cece2a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.477919] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-85be3fcd-74b6-4fd5-bcdb-4a1d4020c624 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.506977] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180958MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2335.507167] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 
2335.508118] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2335.520036] env[68194]: DEBUG nova.network.neutron [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2335.541463] env[68194]: INFO nova.compute.manager [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Took 0.32 seconds to deallocate network for instance. [ 2335.573458] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d has been scheduled to this compute host, the scheduler has made an allocation against this compute node but the instance has yet to start. Skipping heal of allocation: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1764}} [ 2335.573634] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2335.574028] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 073be13d-9a6d-4cfc-997b-f6b61710790a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2335.574028] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance c2c4ea7a-0be6-48ec-af75-712929e48a5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2335.574028] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2335.574179] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 727af4d2-ec4c-4ea3-baae-a32a70d03e0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2335.574350] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 5 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2335.574494] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1152MB phys_disk=200GB used_disk=5GB total_vcpus=48 used_vcpus=5 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2335.639151] env[68194]: INFO nova.scheduler.client.report [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Deleted allocations for instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d [ 2335.662513] env[68194]: DEBUG oslo_concurrency.lockutils [None req-ea670977-159c-4620-b4a1-66c314751852 tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 500.842s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2335.662660] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 305.313s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2335.662905] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Acquiring lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2335.663202] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2335.663411] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2335.665829] env[68194]: INFO nova.compute.manager [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf 
tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Terminating instance [ 2335.669838] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-498d1ccc-b85a-4d51-b9c9-da045d16fa49 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.672936] env[68194]: DEBUG nova.compute.manager [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2335.673190] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2335.673673] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-91c55a52-ae00-4d5b-b41d-051466395005 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.680626] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8a7d5033-23db-47c0-b27d-a2e4cee21ce0 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.687547] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-c767d9cb-07c5-46b6-a23e-9193387faa83 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.721695] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-d3e712d3-cdd8-4265-87ee-8bb90286bf96 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.728466] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-060bae71-32bb-42a0-9c32-e79970899816 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2335.738665] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 8f84a8dc-6908-463c-85e3-f5189e8ca71d could not be found. 
[ 2335.738855] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2335.739050] env[68194]: INFO nova.compute.manager [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Took 0.07 seconds to destroy the instance on the hypervisor. [ 2335.739323] env[68194]: DEBUG oslo.service.loopingcall [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2335.739868] env[68194]: DEBUG nova.compute.manager [-] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2335.739984] env[68194]: DEBUG nova.network.neutron [-] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2335.748887] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2335.757217] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2335.771042] env[68194]: DEBUG nova.network.neutron [-] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2335.772178] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2335.772364] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.265s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2335.777799] env[68194]: INFO nova.compute.manager [-] [instance: 8f84a8dc-6908-463c-85e3-f5189e8ca71d] Took 0.04 seconds to deallocate 
network for instance. [ 2335.859414] env[68194]: DEBUG oslo_concurrency.lockutils [None req-b2f39910-9d8a-49b5-8b1b-4f75b4a49ebf tempest-ServerDiskConfigTestJSON-1362896883 tempest-ServerDiskConfigTestJSON-1362896883-project-member] Lock "8f84a8dc-6908-463c-85e3-f5189e8ca71d" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.197s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2336.768690] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2384.989982] env[68194]: WARNING oslo_vmware.rw_handles [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Error occurred while reading the HTTP response.: http.client.RemoteDisconnected: Remote end closed connection without response [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles Traceback (most recent call last): [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py", line 283, in close [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles self._conn.getresponse() [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 1375, in getresponse [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles response.begin() [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 318, in begin [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles version, status, reason = self._read_status() [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles File "/usr/lib/python3.10/http/client.py", line 287, in _read_status [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles raise RemoteDisconnected("Remote end closed connection without" [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles http.client.RemoteDisconnected: Remote end closed connection without response [ 2384.989982] env[68194]: ERROR oslo_vmware.rw_handles [ 2384.989982] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Downloaded image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to vmware_temp/275e1ba8-d4db-4ea7-852a-c29a709e06b9/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:258}} [ 2384.992067] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Caching image {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:638}} [ 2384.992294] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Copying Virtual Disk [datastore1] vmware_temp/275e1ba8-d4db-4ea7-852a-c29a709e06b9/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk to [datastore1] 
vmware_temp/275e1ba8-d4db-4ea7-852a-c29a709e06b9/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk {{(pid=68194) copy_virtual_disk /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1413}} [ 2384.992587] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualDiskManager.CopyVirtualDisk_Task with opID=oslo.vmware-174b2c9d-6ec2-4f61-95fa-576cd7d597ba {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.000411] env[68194]: DEBUG oslo_vmware.api [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 2385.000411] env[68194]: value = "task-3466980" [ 2385.000411] env[68194]: _type = "Task" [ 2385.000411] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.008259] env[68194]: DEBUG oslo_vmware.api [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': task-3466980, 'name': CopyVirtualDisk_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.510985] env[68194]: DEBUG oslo_vmware.exceptions [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Fault InvalidArgument not matched. {{(pid=68194) get_fault_class /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/exceptions.py:290}} [ 2385.511318] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2385.511913] env[68194]: ERROR nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Instance failed to spawn: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2385.511913] env[68194]: Faults: ['InvalidArgument'] [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Traceback (most recent call last): [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/compute/manager.py", line 2868, in _build_resources [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] yield resources [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] self.driver.spawn(context, instance, image_meta, [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 
5fb0537f-884d-421c-9f47-ec8fd7236e54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] self._fetch_image_if_missing(context, vi) [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] image_cache(vi, tmp_image_ds_loc) [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] vm_util.copy_virtual_disk( [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] session._wait_for_task(vmdk_copy_task) [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] return self.wait_for_task(task_ref) [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] return evt.wait() [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] result = hub.switch() [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] return self.greenlet.switch() [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] self.f(*self.args, **self.kw) [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] raise exceptions.translate_fault(task_info.error) [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] 
oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Faults: ['InvalidArgument'] [ 2385.511913] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] [ 2385.512876] env[68194]: INFO nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Terminating instance [ 2385.513770] env[68194]: DEBUG oslo_concurrency.lockutils [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2385.513975] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Creating directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2385.514231] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-a679cc92-95ee-4c9d-9d51-30e3595ea676 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.516364] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Start destroying the instance on the hypervisor. 
{{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2385.516561] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2385.517276] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-cbe65435-7c98-4e0a-8780-2f37b90c250a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.523871] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Unregistering the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1094}} [ 2385.524097] env[68194]: DEBUG oslo_vmware.service [-] Invoking VirtualMachine.UnregisterVM with opID=oslo.vmware-df83a520-356a-4e1f-86ce-7400f28747ca {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.526114] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Created directory with path [datastore1] devstack-image-cache_base {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2385.526289] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Folder [datastore1] devstack-image-cache_base created. {{(pid=68194) _create_folder_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1851}} [ 2385.527221] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-d943d0b5-cd11-4712-94b2-766220efe46b {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.531950] env[68194]: DEBUG oslo_vmware.api [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Waiting for the task: (returnval){ [ 2385.531950] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5259b104-3816-34b0-4d7a-d1dbce9bca72" [ 2385.531950] env[68194]: _type = "Task" [ 2385.531950] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.538958] env[68194]: DEBUG oslo_vmware.api [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5259b104-3816-34b0-4d7a-d1dbce9bca72, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2385.588374] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Unregistered the VM {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1097}} [ 2385.588577] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Deleting contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1107}} [ 2385.588760] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Deleting the datastore file [datastore1] 5fb0537f-884d-421c-9f47-ec8fd7236e54 {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:211}} [ 2385.589033] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.DeleteDatastoreFile_Task with opID=oslo.vmware-1c653365-1755-43a8-99e9-d2f9790931cd {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2385.597567] env[68194]: DEBUG oslo_vmware.api [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for the task: (returnval){ [ 2385.597567] env[68194]: value = "task-3466982" [ 2385.597567] env[68194]: _type = "Task" [ 2385.597567] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2385.604744] env[68194]: DEBUG oslo_vmware.api [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': task-3466982, 'name': DeleteDatastoreFile_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2386.044536] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Preparing fetch location {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:633}} [ 2386.044893] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Creating directory with path [datastore1] vmware_temp/dec1cc67-93cf-4674-aa35-6396ae46506f/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:399}} [ 2386.044989] env[68194]: DEBUG oslo_vmware.service [-] Invoking FileManager.MakeDirectory with opID=oslo.vmware-6eeff628-c8b2-4e60-8fef-c66fa6fa9270 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.055974] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Created directory with path [datastore1] vmware_temp/dec1cc67-93cf-4674-aa35-6396ae46506f/1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) mkdir /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:404}} [ 2386.056175] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Fetch image to [datastore1] vmware_temp/dec1cc67-93cf-4674-aa35-6396ae46506f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:635}} [ 2386.056347] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to [datastore1] vmware_temp/dec1cc67-93cf-4674-aa35-6396ae46506f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk on the data store datastore1 {{(pid=68194) _fetch_image_as_file /opt/stack/nova/nova/virt/vmwareapi/vmops.py:399}} [ 2386.057050] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-87ef20ed-403c-4c9e-ba94-a8b78da07f93 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.063201] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-054727a4-8619-47dd-a307-79492db04703 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.072749] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-077cc5e6-70d1-4438-b54c-5a9a12b66fa4 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.105487] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a2bd30d0-2ad8-4808-8209-21077731fbfd {{(pid=68194) request_handler 
/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.113605] env[68194]: DEBUG oslo_vmware.service [-] Invoking SessionManager.AcquireGenericServiceTicket with opID=oslo.vmware-7df6fbea-72ae-4ad6-8116-2348ba9e2776 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.115220] env[68194]: DEBUG oslo_vmware.api [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Task: {'id': task-3466982, 'name': DeleteDatastoreFile_Task, 'duration_secs': 0.074919} completed successfully. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2386.115457] env[68194]: DEBUG nova.virt.vmwareapi.ds_util [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Deleted the datastore file {{(pid=68194) file_delete /opt/stack/nova/nova/virt/vmwareapi/ds_util.py:220}} [ 2386.115635] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Deleted contents of the VM from datastore datastore1 {{(pid=68194) _destroy_instance /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1117}} [ 2386.115832] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2386.116014] env[68194]: INFO nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Took 0.60 seconds to destroy the instance on the hypervisor. 
[ 2386.118060] env[68194]: DEBUG nova.compute.claims [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Aborting claim: {{(pid=68194) abort /opt/stack/nova/nova/compute/claims.py:85}} [ 2386.118248] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2386.118465] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2386.136532] env[68194]: DEBUG nova.virt.vmwareapi.images [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Downloading image file data 1feed0b9-f929-4ce4-9c61-ef25290c6d99 to the data store datastore1 {{(pid=68194) fetch_image /opt/stack/nova/nova/virt/vmwareapi/images.py:245}} [ 2386.190440] env[68194]: DEBUG oslo_vmware.rw_handles [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Creating HTTP connection to write to file with size = 21318656 and URL = https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dec1cc67-93cf-4674-aa35-6396ae46506f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. {{(pid=68194) _create_write_connection /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:122}} [ 2386.248959] env[68194]: DEBUG oslo_vmware.rw_handles [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Completed reading data from the image iterator. {{(pid=68194) read /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:765}} [ 2386.249152] env[68194]: DEBUG oslo_vmware.rw_handles [None req-2fe0ab20-d42a-487d-b806-9bebd942110f tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Closing write handle for https://esx7c2n1.openstack.eu-de-1.cloud.sap:443/folder/vmware_temp/dec1cc67-93cf-4674-aa35-6396ae46506f/1feed0b9-f929-4ce4-9c61-ef25290c6d99/tmp-sparse.vmdk?dcPath=ha-datacenter&dsName=datastore1. 
{{(pid=68194) close /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/rw_handles.py:281}} [ 2386.278246] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-162598f0-e0c7-405e-91c2-67f10e18ebb5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.285514] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-321488c9-3f9d-4c7b-8263-fbdd2e4d5979 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.315776] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-151e0d03-eeea-480d-a87c-845121ef2cb5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.322212] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-089225f7-843e-465d-9ffb-5159a8903ede {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.335136] env[68194]: DEBUG nova.compute.provider_tree [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2386.343405] env[68194]: DEBUG nova.scheduler.client.report [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2386.356616] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.abort_instance_claim" :: held 0.238s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2386.357163] env[68194]: ERROR nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Failed to build and run instance: oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2386.357163] env[68194]: Faults: ['InvalidArgument'] [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Traceback (most recent call last): [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/compute/manager.py", line 2615, in _build_and_run_instance [ 2386.357163] env[68194]: ERROR 
nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] self.driver.spawn(context, instance, image_meta, [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/driver.py", line 539, in spawn [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] self._vmops.spawn(context, instance, image_meta, injected_files, [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 786, in spawn [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] self._fetch_image_if_missing(context, vi) [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 639, in _fetch_image_if_missing [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] image_cache(vi, tmp_image_ds_loc) [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/vmops.py", line 537, in _cache_sparse_image [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] vm_util.copy_virtual_disk( [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/vm_util.py", line 1423, in copy_virtual_disk [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] session._wait_for_task(vmdk_copy_task) [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/nova/nova/virt/vmwareapi/session.py", line 157, in _wait_for_task [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] return self.wait_for_task(task_ref) [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 398, in wait_for_task [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] return evt.wait() [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/event.py", line 125, in wait [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] result = hub.switch() [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/eventlet/hubs/hub.py", line 313, in switch [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] return self.greenlet.switch() [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File "/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/common/loopingcall.py", line 75, in _inner [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] self.f(*self.args, **self.kw) [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] File 
"/opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py", line 448, in _poll_task [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] raise exceptions.translate_fault(task_info.error) [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] oslo_vmware.exceptions.VimFaultException: A specified parameter was not correct: fileType [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Faults: ['InvalidArgument'] [ 2386.357163] env[68194]: ERROR nova.compute.manager [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] [ 2386.358579] env[68194]: DEBUG nova.compute.utils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] VimFaultException {{(pid=68194) notify_about_instance_usage /opt/stack/nova/nova/compute/utils.py:430}} [ 2386.359370] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Build of instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 was re-scheduled: A specified parameter was not correct: fileType [ 2386.359370] env[68194]: Faults: ['InvalidArgument'] {{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2454}} [ 2386.359833] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Unplugging VIFs for instance {{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:2980}} [ 2386.360025] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Virt driver does not provide unplug_vifs method, so it is not possible determine if VIFs should be unplugged. 
{{(pid=68194) _cleanup_allocated_networks /opt/stack/nova/nova/compute/manager.py:3003}} [ 2386.360238] env[68194]: DEBUG nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2386.360427] env[68194]: DEBUG nova.network.neutron [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2386.667538] env[68194]: DEBUG nova.network.neutron [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2386.679903] env[68194]: INFO nova.compute.manager [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Took 0.32 seconds to deallocate network for instance. [ 2386.768333] env[68194]: INFO nova.scheduler.client.report [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Deleted allocations for instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 [ 2386.791262] env[68194]: DEBUG oslo_concurrency.lockutils [None req-d7071baa-48c9-44bd-ae7b-72bfa3cea39c tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "5fb0537f-884d-421c-9f47-ec8fd7236e54" "released" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: held 537.151s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2386.791542] env[68194]: DEBUG oslo_concurrency.lockutils [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "5fb0537f-884d-421c-9f47-ec8fd7236e54" acquired by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: waited 340.583s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2386.791771] env[68194]: DEBUG oslo_concurrency.lockutils [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Acquiring lock "5fb0537f-884d-421c-9f47-ec8fd7236e54-events" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2386.791976] env[68194]: DEBUG oslo_concurrency.lockutils [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "5fb0537f-884d-421c-9f47-ec8fd7236e54-events" acquired by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: waited 0.000s 
{{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2386.792412] env[68194]: DEBUG oslo_concurrency.lockutils [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "5fb0537f-884d-421c-9f47-ec8fd7236e54-events" "released" by "nova.compute.manager.InstanceEvents.clear_events_for_instance.._clear_events" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2386.794081] env[68194]: INFO nova.compute.manager [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Terminating instance [ 2386.796196] env[68194]: DEBUG nova.compute.manager [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Start destroying the instance on the hypervisor. {{(pid=68194) _shutdown_instance /opt/stack/nova/nova/compute/manager.py:3124}} [ 2386.796196] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Destroying instance {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1142}} [ 2386.796686] env[68194]: DEBUG oslo_vmware.service [-] Invoking SearchIndex.FindAllByUuid with opID=oslo.vmware-b393cc63-a3eb-4487-bd91-55de8b635776 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.805903] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0954da43-8161-41d3-96c6-1ed95c59a2c9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2386.832449] env[68194]: WARNING nova.virt.vmwareapi.vmops [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Instance does not exist on backend: nova.exception.InstanceNotFound: Instance 5fb0537f-884d-421c-9f47-ec8fd7236e54 could not be found. [ 2386.832672] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Instance destroyed {{(pid=68194) destroy /opt/stack/nova/nova/virt/vmwareapi/vmops.py:1144}} [ 2386.833523] env[68194]: INFO nova.compute.manager [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Took 0.04 seconds to destroy the instance on the hypervisor. [ 2386.833523] env[68194]: DEBUG oslo.service.loopingcall [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Waiting for function nova.compute.manager.ComputeManager._try_deallocate_network.._deallocate_network_with_retries to return. 
{{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2386.833663] env[68194]: DEBUG nova.compute.manager [-] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Deallocating network for instance {{(pid=68194) _deallocate_network /opt/stack/nova/nova/compute/manager.py:2263}} [ 2386.833770] env[68194]: DEBUG nova.network.neutron [-] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] deallocate_for_instance() {{(pid=68194) deallocate_for_instance /opt/stack/nova/nova/network/neutron.py:1803}} [ 2386.856441] env[68194]: DEBUG nova.network.neutron [-] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Updating instance_info_cache with network_info: [] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2386.864171] env[68194]: INFO nova.compute.manager [-] [instance: 5fb0537f-884d-421c-9f47-ec8fd7236e54] Took 0.03 seconds to deallocate network for instance. [ 2386.961483] env[68194]: DEBUG oslo_concurrency.lockutils [None req-84e8d477-fdd0-45fa-8ef3-3beb27fef878 tempest-DeleteServersTestJSON-1330604711 tempest-DeleteServersTestJSON-1330604711-project-member] Lock "5fb0537f-884d-421c-9f47-ec8fd7236e54" "released" by "nova.compute.manager.ComputeManager.terminate_instance..do_terminate_instance" :: held 0.170s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2392.417506] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_expired_console_auth_tokens {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.423878] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rebooting_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.424189] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_volume_usage {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.424297] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._reclaim_queued_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2394.424467] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] CONF.reclaim_instance_interval <= 0, skipping... 
{{(pid=68194) _reclaim_queued_deletes /opt/stack/nova/nova/compute/manager.py:10530}} [ 2395.416778] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._heal_instance_info_cache {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2395.416983] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Starting heal instance info cache {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9911}} [ 2395.417106] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Rebuilding the list of instances to heal {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9915}} [ 2395.430866] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 073be13d-9a6d-4cfc-997b-f6b61710790a] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2395.431206] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: c2c4ea7a-0be6-48ec-af75-712929e48a5b] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2395.431206] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2395.431436] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] [instance: 727af4d2-ec4c-4ea3-baae-a32a70d03e0e] Skipping network cache update for instance because it is Building. {{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9924}} [ 2395.431587] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Didn't find any instances for network info cache update. 
{{(pid=68194) _heal_instance_info_cache /opt/stack/nova/nova/compute/manager.py:9997}} [ 2395.432081] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_rescued_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2395.432306] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._instance_usage_audit {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2396.416634] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._check_instance_build_time {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2396.416988] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._sync_scheduler_instance_info {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2396.433075] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._poll_unconfirmed_resizes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2396.433383] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager.update_available_resource {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2396.443963] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2396.444197] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2396.444370] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.clean_compute_node_cache" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2396.444533] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Auditing locally available compute resources for cpu-1 (node: domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28) {{(pid=68194) update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:907}} [ 2396.445949] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-242f15b6-08de-4d28-a97a-b734be7ac2d5 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.454551] env[68194]: DEBUG 
oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-06b077c4-5d40-46d7-8a13-99b3532d4104 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.468041] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5bef21ae-14ee-4bfb-a9f2-fe4a2bd6b978 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.474627] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-2ac5f346-97f3-4da4-ac46-a01c2d2975f7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.502642] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Hypervisor/Node resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 free_ram=180959MB free_disk=94GB free_vcpus=48 pci_devices=None {{(pid=68194) _report_hypervisor_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1106}} [ 2396.502802] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2396.502978] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2396.635512] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 073be13d-9a6d-4cfc-997b-f6b61710790a actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2396.635512] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance c2c4ea7a-0be6-48ec-af75-712929e48a5b actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2396.635512] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 8c0511d2-b29c-469b-ac57-9bc2b9b6ff47 actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. {{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2396.635747] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Instance 727af4d2-ec4c-4ea3-baae-a32a70d03e0e actively managed on this compute host and has allocations in placement: {'resources': {'DISK_GB': 1, 'MEMORY_MB': 128, 'VCPU': 1}}. 
{{(pid=68194) _remove_deleted_instances_allocations /opt/stack/nova/nova/compute/resource_tracker.py:1707}} [ 2396.635747] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Total usable vcpus: 48, total allocated vcpus: 4 {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1129}} [ 2396.635906] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Final resource view: name=domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 phys_ram=196590MB used_ram=1024MB phys_disk=200GB used_disk=4GB total_vcpus=48 used_vcpus=4 pci_stats=[] {{(pid=68194) _report_final_resource_view /opt/stack/nova/nova/compute/resource_tracker.py:1138}} [ 2396.650989] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing inventories for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:804}} [ 2396.665080] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating ProviderTree inventory for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 from _refresh_and_get_inventory using data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) _refresh_and_get_inventory /opt/stack/nova/nova/scheduler/client/report.py:768}} [ 2396.665269] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Updating inventory in ProviderTree for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 with inventory: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:176}} [ 2396.675471] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing aggregate associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, aggregates: None {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:813}} [ 2396.693988] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Refreshing trait associations for resource provider 717076d7-0911-435a-89c8-6f0e41bd02c5, traits: COMPUTE_SAME_HOST_COLD_MIGRATE,COMPUTE_NODE,COMPUTE_IMAGE_TYPE_ISO,COMPUTE_IMAGE_TYPE_VMDK,COMPUTE_NET_ATTACH_INTERFACE {{(pid=68194) _refresh_associations /opt/stack/nova/nova/scheduler/client/report.py:825}} [ 2396.751069] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-a798c796-37ad-4f88-a811-2b5413de535a {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.758199] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with 
opID=oslo.vmware-a38f6dbd-fe3e-4f16-98b8-ae837caf6046 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.788677] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-10a60f2d-4df0-4d41-a5a4-47e91bd259e3 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.796090] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-66944564-3460-430e-a8fe-95200d412a9c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2396.809214] env[68194]: DEBUG nova.compute.provider_tree [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2396.817241] env[68194]: DEBUG nova.scheduler.client.report [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2396.830673] env[68194]: DEBUG nova.compute.resource_tracker [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Compute_service record updated for cpu-1:domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 {{(pid=68194) _update_available_resource /opt/stack/nova/nova/compute/resource_tracker.py:1067}} [ 2396.830860] env[68194]: DEBUG oslo_concurrency.lockutils [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker._update_available_resource" :: held 0.328s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2402.482427] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "43308e03-f3ec-44ae-93ab-6781f04f8170" by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2402.483351] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Lock "43308e03-f3ec-44ae-93ab-6781f04f8170" acquired by "nova.compute.manager.ComputeManager.build_and_run_instance.._locked_do_build_and_run_instance" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2402.492747] env[68194]: DEBUG nova.compute.manager [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Starting instance... 
{{(pid=68194) _do_build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2406}} [ 2402.540822] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "compute_resources" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2402.541084] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Lock "compute_resources" acquired by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2402.542514] env[68194]: INFO nova.compute.claims [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Claim successful on node domain-c8.fc996f14-c53b-4953-92e3-bdfa48f5cc28 [ 2402.656297] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-f7e3c956-8939-41f6-8819-cef1a98e1066 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.665353] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-39dd7c33-5c9f-4a54-a4ff-00c91862c51e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.694824] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-5dd72bff-d44e-41e0-a0e4-bc3584213b4c {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.701582] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-0f8f5208-5044-4277-b272-060b497d6288 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.713916] env[68194]: DEBUG nova.compute.provider_tree [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Inventory has not changed in ProviderTree for provider: 717076d7-0911-435a-89c8-6f0e41bd02c5 {{(pid=68194) update_inventory /opt/stack/nova/nova/compute/provider_tree.py:180}} [ 2402.722273] env[68194]: DEBUG nova.scheduler.client.report [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Inventory has not changed for provider 717076d7-0911-435a-89c8-6f0e41bd02c5 based on inventory data: {'VCPU': {'total': 48, 'reserved': 0, 'min_unit': 1, 'max_unit': 16, 'step_size': 1, 'allocation_ratio': 4.0}, 'MEMORY_MB': {'total': 196590, 'reserved': 512, 'min_unit': 1, 'max_unit': 65530, 'step_size': 1, 'allocation_ratio': 1.0}, 'DISK_GB': {'total': 400, 'reserved': 0, 'min_unit': 1, 'max_unit': 94, 'step_size': 1, 'allocation_ratio': 1.0}} {{(pid=68194) set_inventory_for_provider /opt/stack/nova/nova/scheduler/client/report.py:940}} [ 2402.736762] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 
tempest-ServersTestJSON-1869132353-project-member] Lock "compute_resources" "released" by "nova.compute.resource_tracker.ResourceTracker.instance_claim" :: held 0.196s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2402.737196] env[68194]: DEBUG nova.compute.manager [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Start building networks asynchronously for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2803}} [ 2402.769527] env[68194]: DEBUG nova.compute.utils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Using /dev/sd instead of None {{(pid=68194) get_next_device_name /opt/stack/nova/nova/compute/utils.py:238}} [ 2402.770705] env[68194]: DEBUG nova.compute.manager [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Allocating IP information in the background. {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1956}} [ 2402.770880] env[68194]: DEBUG nova.network.neutron [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] allocate_for_instance() {{(pid=68194) allocate_for_instance /opt/stack/nova/nova/network/neutron.py:1156}} [ 2402.779752] env[68194]: DEBUG nova.compute.manager [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Start building block device mappings for instance. {{(pid=68194) _build_resources /opt/stack/nova/nova/compute/manager.py:2838}} [ 2402.826447] env[68194]: DEBUG nova.policy [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Policy check for network:attach_external_network failed with credentials {'is_admin': False, 'user_id': 'eaa3e83d9acf4b68ab12b2439ad7b513', 'user_domain_id': 'default', 'system_scope': None, 'domain_id': None, 'project_id': 'b53572aed8d9403a8c3c5abf3f070588', 'project_domain_id': 'default', 'roles': ['member', 'reader'], 'is_admin_project': True, 'service_user_id': None, 'service_user_domain_id': None, 'service_project_id': None, 'service_project_domain_id': None, 'service_roles': []} {{(pid=68194) authorize /opt/stack/nova/nova/policy.py:203}} [ 2402.840450] env[68194]: DEBUG nova.compute.manager [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Start spawning the instance on the hypervisor. 
{{(pid=68194) _build_and_run_instance /opt/stack/nova/nova/compute/manager.py:2612}} [ 2402.865499] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Getting desirable topologies for flavor Flavor(created_at=2025-04-16T19:14:46Z,deleted=False,deleted_at=None,description=None,disabled=False,ephemeral_gb=0,extra_specs={hw_rng:allowed='True'},flavorid='42',id=11,is_public=True,memory_mb=128,name='m1.nano',projects=,root_gb=1,rxtx_factor=1.0,swap=0,updated_at=None,vcpu_weight=0,vcpus=1) and image_meta ImageMeta(checksum='879b8eacf4b511d84bac79c7fe0e0d0a',container_format='bare',created_at=2025-04-16T19:14:30Z,direct_url=,disk_format='vmdk',id=1feed0b9-f929-4ce4-9c61-ef25290c6d99,min_disk=0,min_ram=0,name='cirros-d240228-sparse;paraVirtual;vmxnet3',owner='02796dfa696c46f98aba9ec6c16fb9fb',properties=ImageMetaProps,protected=,size=21318656,status='active',tags=,updated_at=2025-04-16T19:14:31Z,virtual_size=,visibility=), allow threads: False {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:563}} [ 2402.865744] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Flavor limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:348}} [ 2402.865931] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Image limits 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:352}} [ 2402.866134] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Flavor pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:388}} [ 2402.866315] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Image pref 0:0:0 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:392}} [ 2402.866485] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Chose sockets=0, cores=0, threads=0; limits were sockets=65536, cores=65536, threads=65536 {{(pid=68194) get_cpu_topology_constraints /opt/stack/nova/nova/virt/hardware.py:430}} [ 2402.866703] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Topology preferred VirtCPUTopology(cores=0,sockets=0,threads=0), maximum VirtCPUTopology(cores=65536,sockets=65536,threads=65536) {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:569}} [ 2402.866860] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Build topologies for 1 vcpu(s) 1:1:1 {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:471}} [ 2402.867044] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 
tempest-ServersTestJSON-1869132353-project-member] Got 1 possible topologies {{(pid=68194) _get_possible_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:501}} [ 2402.867214] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Possible topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:575}} [ 2402.867386] env[68194]: DEBUG nova.virt.hardware [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Sorted desired topologies [VirtCPUTopology(cores=1,sockets=1,threads=1)] {{(pid=68194) _get_desirable_cpu_topologies /opt/stack/nova/nova/virt/hardware.py:577}} [ 2402.868249] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-fdacb881-d947-454c-8060-05025c7b16b7 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2402.876203] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-3bced019-c70f-417d-85c7-16d9cc26274e {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2403.698223] env[68194]: DEBUG nova.network.neutron [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Successfully created port: 90578676-b915-4173-a148-b24580409ba6 {{(pid=68194) _create_port_minimal /opt/stack/nova/nova/network/neutron.py:548}} [ 2404.186425] env[68194]: DEBUG nova.compute.manager [req-687d59ba-4e03-41f0-ac54-84a68911615a req-3e221fa7-2838-421b-b751-97afdc4a5f3d service nova] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Received event network-vif-plugged-90578676-b915-4173-a148-b24580409ba6 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2404.186649] env[68194]: DEBUG oslo_concurrency.lockutils [req-687d59ba-4e03-41f0-ac54-84a68911615a req-3e221fa7-2838-421b-b751-97afdc4a5f3d service nova] Acquiring lock "43308e03-f3ec-44ae-93ab-6781f04f8170-events" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:404}} [ 2404.186856] env[68194]: DEBUG oslo_concurrency.lockutils [req-687d59ba-4e03-41f0-ac54-84a68911615a req-3e221fa7-2838-421b-b751-97afdc4a5f3d service nova] Lock "43308e03-f3ec-44ae-93ab-6781f04f8170-events" acquired by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: waited 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:409}} [ 2404.187096] env[68194]: DEBUG oslo_concurrency.lockutils [req-687d59ba-4e03-41f0-ac54-84a68911615a req-3e221fa7-2838-421b-b751-97afdc4a5f3d service nova] Lock "43308e03-f3ec-44ae-93ab-6781f04f8170-events" "released" by "nova.compute.manager.InstanceEvents.pop_instance_event.._pop_event" :: held 0.000s {{(pid=68194) inner /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:423}} [ 2404.187318] env[68194]: DEBUG nova.compute.manager [req-687d59ba-4e03-41f0-ac54-84a68911615a req-3e221fa7-2838-421b-b751-97afdc4a5f3d service nova] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] 
No waiting events found dispatching network-vif-plugged-90578676-b915-4173-a148-b24580409ba6 {{(pid=68194) pop_instance_event /opt/stack/nova/nova/compute/manager.py:320}} [ 2404.187526] env[68194]: WARNING nova.compute.manager [req-687d59ba-4e03-41f0-ac54-84a68911615a req-3e221fa7-2838-421b-b751-97afdc4a5f3d service nova] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Received unexpected event network-vif-plugged-90578676-b915-4173-a148-b24580409ba6 for instance with vm_state building and task_state spawning. [ 2404.261686] env[68194]: DEBUG nova.network.neutron [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Successfully updated port: 90578676-b915-4173-a148-b24580409ba6 {{(pid=68194) _update_port /opt/stack/nova/nova/network/neutron.py:586}} [ 2404.275690] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "refresh_cache-43308e03-f3ec-44ae-93ab-6781f04f8170" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2404.275849] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquired lock "refresh_cache-43308e03-f3ec-44ae-93ab-6781f04f8170" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2404.276012] env[68194]: DEBUG nova.network.neutron [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Building network info cache for instance {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2010}} [ 2404.311056] env[68194]: DEBUG nova.network.neutron [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Instance cache missing network info. 
{{(pid=68194) _get_preexisting_port_ids /opt/stack/nova/nova/network/neutron.py:3323}} [ 2404.461992] env[68194]: DEBUG nova.network.neutron [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Updating instance_info_cache with network_info: [{"id": "90578676-b915-4173-a148-b24580409ba6", "address": "fa:16:3e:53:82:2b", "network": {"id": "e79e376d-55f2-4078-b6ff-dfe489f79178", "bridge": "br-int", "label": "tempest-ServersTestJSON-366329787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b53572aed8d9403a8c3c5abf3f070588", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90578676-b9", "ovs_interfaceid": "90578676-b915-4173-a148-b24580409ba6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2404.474520] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Releasing lock "refresh_cache-43308e03-f3ec-44ae-93ab-6781f04f8170" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2404.474989] env[68194]: DEBUG nova.compute.manager [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Instance network_info: |[{"id": "90578676-b915-4173-a148-b24580409ba6", "address": "fa:16:3e:53:82:2b", "network": {"id": "e79e376d-55f2-4078-b6ff-dfe489f79178", "bridge": "br-int", "label": "tempest-ServersTestJSON-366329787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b53572aed8d9403a8c3c5abf3f070588", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90578676-b9", "ovs_interfaceid": "90578676-b915-4173-a148-b24580409ba6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}]| {{(pid=68194) _allocate_network_async /opt/stack/nova/nova/compute/manager.py:1971}} [ 2404.476060] env[68194]: 
DEBUG nova.virt.vmwareapi.vmops [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Instance VIF info [{'network_name': 'br-int', 'mac_address': 'fa:16:3e:53:82:2b', 'network_ref': {'type': 'OpaqueNetwork', 'network-id': '89f807d9-140f-4a6f-8bce-96795f9482ee', 'network-type': 'nsx.LogicalSwitch', 'use-external-id': True}, 'iface_id': '90578676-b915-4173-a148-b24580409ba6', 'vif_model': 'vmxnet3'}] {{(pid=68194) build_virtual_machine /opt/stack/nova/nova/virt/vmwareapi/vmops.py:279}} [ 2404.483885] env[68194]: DEBUG oslo.service.loopingcall [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Waiting for function nova.virt.vmwareapi.vm_util.create_vm to return. {{(pid=68194) func /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/loopingcall.py:435}} [ 2404.484383] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Creating VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1327}} [ 2404.485120] env[68194]: DEBUG oslo_vmware.service [-] Invoking Folder.CreateVM_Task with opID=oslo.vmware-1b7feeea-7f3f-4a07-bf90-05a27a4586d9 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2404.505538] env[68194]: DEBUG oslo_vmware.api [-] Waiting for the task: (returnval){ [ 2404.505538] env[68194]: value = "task-3466983" [ 2404.505538] env[68194]: _type = "Task" [ 2404.505538] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2404.513591] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466983, 'name': CreateVM_Task} progress is 0%. {{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.016062] env[68194]: DEBUG oslo_vmware.api [-] Task: {'id': task-3466983, 'name': CreateVM_Task, 'duration_secs': 0.297285} completed successfully. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:444}} [ 2405.016407] env[68194]: DEBUG nova.virt.vmwareapi.vm_util [-] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Created VM on the ESX host {{(pid=68194) create_vm /opt/stack/nova/nova/virt/vmwareapi/vm_util.py:1349}} [ 2405.016819] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2405.016979] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquired lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2405.017303] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquired external semaphore "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:323}} [ 2405.017555] env[68194]: DEBUG oslo_vmware.service [-] Invoking HostDatastoreBrowser.SearchDatastore_Task with opID=oslo.vmware-57a25f73-79b1-4215-a57b-52212d8f40b1 {{(pid=68194) request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2405.021785] env[68194]: DEBUG oslo_vmware.api [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Waiting for the task: (returnval){ [ 2405.021785] env[68194]: value = "session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5219d0b1-ae89-c782-c1b2-33a0c6e799bd" [ 2405.021785] env[68194]: _type = "Task" [ 2405.021785] env[68194]: } to complete. {{(pid=68194) wait_for_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:397}} [ 2405.028745] env[68194]: DEBUG oslo_vmware.api [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Task: {'id': session[52bc9f01-6174-1b95-477b-9789e1f2a7ed]5219d0b1-ae89-c782-c1b2-33a0c6e799bd, 'name': SearchDatastore_Task} progress is 0%. 
{{(pid=68194) _poll_task /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/api.py:434}} [ 2405.533036] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Releasing lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2405.533179] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Processing image 1feed0b9-f929-4ce4-9c61-ef25290c6d99 {{(pid=68194) _fetch_image_if_missing /opt/stack/nova/nova/virt/vmwareapi/vmops.py:624}} [ 2405.533276] env[68194]: DEBUG oslo_concurrency.lockutils [None req-dfe63eae-d27b-4b16-aeec-d93f67df3de5 tempest-ServersTestJSON-1869132353 tempest-ServersTestJSON-1869132353-project-member] Acquiring lock "[datastore1] devstack-image-cache_base/1feed0b9-f929-4ce4-9c61-ef25290c6d99/1feed0b9-f929-4ce4-9c61-ef25290c6d99.vmdk" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2406.216204] env[68194]: DEBUG nova.compute.manager [req-c44dd5f1-22a3-4e1e-948f-c9c7a9ac0c8d req-0c085ae3-39c3-45cd-8fb0-75a6f8b475f8 service nova] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Received event network-changed-90578676-b915-4173-a148-b24580409ba6 {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11101}} [ 2406.216460] env[68194]: DEBUG nova.compute.manager [req-c44dd5f1-22a3-4e1e-948f-c9c7a9ac0c8d req-0c085ae3-39c3-45cd-8fb0-75a6f8b475f8 service nova] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Refreshing instance network info cache due to event network-changed-90578676-b915-4173-a148-b24580409ba6. {{(pid=68194) external_instance_event /opt/stack/nova/nova/compute/manager.py:11106}} [ 2406.216677] env[68194]: DEBUG oslo_concurrency.lockutils [req-c44dd5f1-22a3-4e1e-948f-c9c7a9ac0c8d req-0c085ae3-39c3-45cd-8fb0-75a6f8b475f8 service nova] Acquiring lock "refresh_cache-43308e03-f3ec-44ae-93ab-6781f04f8170" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:312}} [ 2406.216826] env[68194]: DEBUG oslo_concurrency.lockutils [req-c44dd5f1-22a3-4e1e-948f-c9c7a9ac0c8d req-0c085ae3-39c3-45cd-8fb0-75a6f8b475f8 service nova] Acquired lock "refresh_cache-43308e03-f3ec-44ae-93ab-6781f04f8170" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:315}} [ 2406.216989] env[68194]: DEBUG nova.network.neutron [req-c44dd5f1-22a3-4e1e-948f-c9c7a9ac0c8d req-0c085ae3-39c3-45cd-8fb0-75a6f8b475f8 service nova] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Refreshing network info cache for port 90578676-b915-4173-a148-b24580409ba6 {{(pid=68194) _get_instance_nw_info /opt/stack/nova/nova/network/neutron.py:2007}} [ 2406.445858] env[68194]: DEBUG nova.network.neutron [req-c44dd5f1-22a3-4e1e-948f-c9c7a9ac0c8d req-0c085ae3-39c3-45cd-8fb0-75a6f8b475f8 service nova] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Updated VIF entry in instance network info cache for port 90578676-b915-4173-a148-b24580409ba6. 
{{(pid=68194) _build_network_info_model /opt/stack/nova/nova/network/neutron.py:3482}} [ 2406.446249] env[68194]: DEBUG nova.network.neutron [req-c44dd5f1-22a3-4e1e-948f-c9c7a9ac0c8d req-0c085ae3-39c3-45cd-8fb0-75a6f8b475f8 service nova] [instance: 43308e03-f3ec-44ae-93ab-6781f04f8170] Updating instance_info_cache with network_info: [{"id": "90578676-b915-4173-a148-b24580409ba6", "address": "fa:16:3e:53:82:2b", "network": {"id": "e79e376d-55f2-4078-b6ff-dfe489f79178", "bridge": "br-int", "label": "tempest-ServersTestJSON-366329787-network", "subnets": [{"cidr": "192.168.128.0/28", "dns": [], "gateway": {"address": "192.168.128.1", "type": "gateway", "version": 4, "meta": {}}, "ips": [{"address": "192.168.128.12", "type": "fixed", "version": 4, "meta": {}, "floating_ips": []}], "routes": [], "version": 4, "meta": {"enable_dhcp": true, "dhcp_server": "192.168.128.2"}}], "meta": {"injected": false, "tenant_id": "b53572aed8d9403a8c3c5abf3f070588", "mtu": 8950, "physical_network": "default", "tunneled": false}}, "type": "ovs", "details": {"connectivity": "l2", "port_filter": true, "nsx-logical-switch-id": "89f807d9-140f-4a6f-8bce-96795f9482ee", "external-id": "nsx-vlan-transportzone-762", "segmentation_id": 762, "bound_drivers": {"0": "nsxv3"}}, "devname": "tap90578676-b9", "ovs_interfaceid": "90578676-b915-4173-a148-b24580409ba6", "qbh_params": null, "qbg_params": null, "active": true, "vnic_type": "normal", "profile": {}, "preserve_on_delete": false, "delegate_create": true, "meta": {}}] {{(pid=68194) update_instance_cache_with_nw_info /opt/stack/nova/nova/network/neutron.py:116}} [ 2406.457191] env[68194]: DEBUG oslo_concurrency.lockutils [req-c44dd5f1-22a3-4e1e-948f-c9c7a9ac0c8d req-0c085ae3-39c3-45cd-8fb0-75a6f8b475f8 service nova] Releasing lock "refresh_cache-43308e03-f3ec-44ae-93ab-6781f04f8170" {{(pid=68194) lock /opt/stack/data/venv/lib/python3.10/site-packages/oslo_concurrency/lockutils.py:333}} [ 2407.416297] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._run_pending_deletes {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2407.416565] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Cleaning up deleted instances {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11198}} [ 2407.427288] env[68194]: DEBUG nova.compute.manager [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] There are 0 instances to clean {{(pid=68194) _run_pending_deletes /opt/stack/nova/nova/compute/manager.py:11207}} [ 2411.461890] env[68194]: DEBUG oslo_service.periodic_task [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Running periodic task ComputeManager._cleanup_running_deleted_instances {{(pid=68194) run_periodic_tasks /opt/stack/data/venv/lib/python3.10/site-packages/oslo_service/periodic_task.py:210}} [ 2411.462366] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Getting list of instances from cluster (obj){ [ 2411.462366] env[68194]: value = "domain-c8" [ 2411.462366] env[68194]: _type = "ClusterComputeResource" [ 2411.462366] env[68194]: } {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2122}} [ 2411.464085] env[68194]: DEBUG oslo_vmware.service [-] Invoking PropertyCollector.RetrievePropertiesEx with opID=oslo.vmware-8690f69b-9782-407e-ab75-73be5d4d33cd {{(pid=68194) 
request_handler /opt/stack/data/venv/lib/python3.10/site-packages/oslo_vmware/service.py:371}} [ 2411.484776] env[68194]: DEBUG nova.virt.vmwareapi.vmops [None req-a99cb24b-ec8f-4ac2-a29f-e5e62fa0f11b None None] Got total of 5 instances {{(pid=68194) list_instances /opt/stack/nova/nova/virt/vmwareapi/vmops.py:2131}}
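The CreateVM_Task entries above follow the usual oslo.vmware calling pattern: invoke a vSphere API method through the session, get back a Task managed-object reference, and poll it until it completes (the "progress is 0%" / "completed successfully" records). A minimal sketch of that pattern follows; it is an illustration only, not the Nova spawn path itself, and it assumes a reachable vCenter. The hostname, credentials, folder and resource-pool references are placeholders rather than values taken from this log.

    from oslo_vmware import api as vmware_api

    # Placeholder connection details -- not the endpoint used in this log.
    session = vmware_api.VMwareAPISession(
        'vcenter.example.test', 'nova-user', 'secret',
        api_retry_count=3, task_poll_interval=0.5)

    vim = session.vim

    # Minimal VM config spec built through the SOAP client factory.
    config_spec = vim.client.factory.create('ns0:VirtualMachineConfigSpec')
    config_spec.name = 'example-vm'
    config_spec.files = vim.client.factory.create('ns0:VirtualMachineFileInfo')
    config_spec.files.vmPathName = '[datastore1]'

    folder_ref = ...   # placeholder: the datacenter's vmFolder moref
    pool_ref = ...     # placeholder: a ResourcePool moref looked up beforehand

    # The asynchronous call returns a Task moref; wait_for_task polls it until
    # the task reaches a terminal state, which is what produces the task
    # progress lines seen in the log.
    task = session.invoke_api(vim, 'CreateVM_Task', folder_ref,
                              config=config_spec, pool=pool_ref)
    task_info = session.wait_for_task(task)
    vm_ref = task_info.result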